Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "media/video/capture/linux/v4l2_video_capture_delegate.h" | |
| 6 | |
| 7 #include <poll.h> | |
| 8 #include <sys/fcntl.h> | |
| 9 #include <sys/ioctl.h> | |
| 10 #include <sys/mman.h> | |
| 11 | |
| 12 #include "base/bind.h" | |
| 13 #include "base/files/file_enumerator.h" | |
| 14 #include "base/posix/eintr_wrapper.h" | |
| 15 #include "base/strings/stringprintf.h" | |
| 16 #include "media/base/bind_to_current_loop.h" | |
| 17 #include "media/video/capture/linux/video_capture_device_linux.h" | |
| 18 | |
| 19 namespace media { | |
| 20 | |
// Max number of video buffers VideoCaptureDeviceLinux can allocate; requested
// from the driver via VIDIOC_REQBUFS in AllocateAndStart().
const uint32_t kMaxVideoBuffers = 2;
// Timeout in milliseconds v4l2_thread_ blocks waiting for a frame from the hw.
const int kCaptureTimeoutMs = 200;
// The number of continuous timeouts tolerated before treated as error.
const int kContinuousTimeoutLimit = 10;
// MJPEG is preferred if the requested width or height is larger than this.
const int kMjpegWidth = 640;
const int kMjpegHeight = 480;
// Typical framerate, in fps. Used as fallback when the caller requests 0 fps.
const int kTypicalFramerate = 30;

// V4L2 color formats supported by V4L2CaptureDelegateSinglePlane. This list is
// ordered by precedence of use.
static const uint32_t kSinglePlaneSupportedFormats[] = {
    V4L2_PIX_FMT_YUV420,
    V4L2_PIX_FMT_YUYV,
    V4L2_PIX_FMT_UYVY};

// List of supported formats and their respective amount of sub-buffers for
// V4L2CaptureDelegateMultiPlane.
static const struct {
  uint32_t fourcc;
  size_t num_planes;
} kMultiPlaneSupportedFormats[] = {
    {V4L2_PIX_FMT_YUV420M, 3}
    // TODO(mcasas): add V4L2_PIX_FMT_YVU420M when available in bots.
};
| 49 | |
| 50 // Returns the input fourcc as a std::string four char representation. | |
| 51 static std::string FourccToString(uint32_t fourcc) { | |
| 52 return base::StringPrintf("%c%c%c%c", fourcc & 0xFF, (fourcc >> 8) & 0xFF, | |
| 53 (fourcc >> 16) & 0xFF, (fourcc >> 24) & 0xFF); | |
| 54 } | |
| 55 | |
| 56 static std::list<uint32_t> GetListOfUsableFourCcsSinglePlane() { | |
| 57 return std::list<uint32_t>( | |
| 58 kSinglePlaneSupportedFormats, | |
| 59 kSinglePlaneSupportedFormats + arraysize(kSinglePlaneSupportedFormats)); | |
| 60 } | |
| 61 | |
| 62 static size_t GetNumPlanesForFourCc(uint32_t fourcc) { | |
| 63 for (const auto& fourcc_and_pixel_format : kMultiPlaneSupportedFormats) { | |
| 64 if (fourcc_and_pixel_format.fourcc == fourcc) | |
| 65 return fourcc_and_pixel_format.num_planes; | |
| 66 } | |
| 67 NOTREACHED() << "Unknown fourcc " << FourccToString(fourcc); | |
| 68 return 0; | |
| 69 } | |
| 70 | |
| 71 static std::list<uint32_t> GetListOfUsableFourCcsMultiPlane() { | |
| 72 std::list<uint32_t> supported_formats; | |
| 73 for (const auto& i : kMultiPlaneSupportedFormats) | |
| 74 supported_formats.push_back(i.fourcc); | |
| 75 return supported_formats; | |
| 76 } | |
| 77 | |
| 78 // Class keeping track of SPLANE/MPLANE V4L2 buffers, mmap()ed on construction | |
| 79 // and munmap()ed on destruction. Destruction is syntactically equal for | |
| 80 // S/MPLANE but not construction, so this is implemented in derived classes. | |
| 81 // Internally it has a vector of planes, which for SPLANE will contain only | |
| 82 // one element. | |
| 83 class V4L2VideoCaptureDelegate::BufferTracker | |
| 84 : public base::RefCounted<BufferTracker> { | |
| 85 public: | |
| 86 struct Plane { | |
| 87 void* start; | |
| 88 size_t length; | |
| 89 }; | |
| 90 virtual bool Init(int fd, const v4l2_buffer& buffer) = 0; | |
| 91 | |
| 92 uint8_t* const GetPlaneStart(size_t plane) const { | |
| 93 return static_cast<uint8_t* const>(planes_[plane].start); | |
| 94 } | |
| 95 size_t GetPlaneLength(size_t plane) const { return planes_[plane].length; } | |
| 96 | |
| 97 std::vector<Plane>& planes() { return planes_; } | |
| 98 | |
| 99 protected: | |
| 100 friend class base::RefCounted<BufferTracker>; | |
| 101 virtual ~BufferTracker(); | |
| 102 | |
| 103 private: | |
| 104 std::vector<Plane> planes_; | |
| 105 }; | |
| 106 | |
| 107 // V4L2 specifics for SPLANE API. | |
| 108 class V4L2CaptureDelegateSinglePlane final : public V4L2VideoCaptureDelegate { | |
| 109 public: | |
| 110 V4L2CaptureDelegateSinglePlane( | |
| 111 const VideoCaptureDevice::Name& device_name, | |
| 112 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, | |
| 113 int power_line_frequency) | |
| 114 : V4L2VideoCaptureDelegate(device_name, | |
| 115 v4l2_task_runner, | |
| 116 power_line_frequency) {} | |
| 117 | |
| 118 private: | |
| 119 // BufferTracker derivation to implement construction semantics for SPLANE. | |
| 120 class BufferTrackerSPlane final : public BufferTracker { | |
| 121 public: | |
| 122 bool Init(int fd, const v4l2_buffer& buffer) override; | |
| 123 | |
| 124 private: | |
| 125 ~BufferTrackerSPlane() override {}; | |
| 126 }; | |
| 127 | |
| 128 ~V4L2CaptureDelegateSinglePlane() override {}; | |
| 129 | |
| 130 // V4L2VideoCaptureDelegate virtual methods implementation. | |
| 131 scoped_refptr<BufferTracker> CreateBufferTracker() override; | |
| 132 bool FillV4L2Format(v4l2_format* format, | |
| 133 uint32_t width, | |
| 134 uint32_t height, | |
| 135 uint32_t pixelformat_fourcc) override; | |
| 136 void FinishFillingV4L2Buffer(v4l2_buffer* buffer) const override; | |
| 137 void SendBuffer(const scoped_refptr<BufferTracker>& buffer) override; | |
| 138 }; | |
| 139 | |
| 140 // V4L2 specifics for MPLANE API. | |
| 141 class V4L2CaptureDelegateMultiPlane final : public V4L2VideoCaptureDelegate { | |
| 142 public: | |
| 143 V4L2CaptureDelegateMultiPlane( | |
| 144 const VideoCaptureDevice::Name& device_name, | |
| 145 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, | |
| 146 int power_line_frequency) | |
| 147 : V4L2VideoCaptureDelegate(device_name, | |
| 148 v4l2_task_runner, | |
| 149 power_line_frequency), | |
| 150 fourcc_(0), | |
| 151 num_planes_(0) {} | |
| 152 | |
| 153 private: | |
| 154 // BufferTracker derivation to implement construction semantics for MPLANE. | |
| 155 class BufferTrackerMPlane final : public BufferTracker { | |
| 156 public: | |
| 157 bool Init(int fd, const v4l2_buffer& buffer) override; | |
| 158 | |
| 159 private: | |
| 160 ~BufferTrackerMPlane() override {}; | |
| 161 }; | |
| 162 | |
| 163 ~V4L2CaptureDelegateMultiPlane() override {}; | |
| 164 | |
| 165 // V4L2VideoCaptureDelegate virtual methods implementation. | |
| 166 scoped_refptr<BufferTracker> CreateBufferTracker() override; | |
| 167 bool FillV4L2Format(v4l2_format* format, | |
| 168 uint32_t width, | |
| 169 uint32_t height, | |
| 170 uint32_t pixelformat_fourcc) override; | |
| 171 void FinishFillingV4L2Buffer(v4l2_buffer* buffer) const override; | |
| 172 void SendBuffer(const scoped_refptr<BufferTracker>& buffer) override; | |
| 173 | |
| 174 // Actual pixel format and number of planes, known after FillV4L2Format(). | |
| 175 uint32_t fourcc_; | |
| 176 size_t num_planes_; | |
| 177 | |
| 178 // Scoped_ptr to allocate and track as many v4l2_plane structs as planes, | |
| 179 // needed inside v4l2_buffer. | |
| 180 scoped_ptr<struct v4l2_plane[]> v4l2_plane_; | |
| 181 }; | |
| 182 | |
| 183 // static | |
| 184 scoped_refptr<V4L2VideoCaptureDelegate> | |
| 185 V4L2VideoCaptureDelegate::CreateV4L2VideoCaptureDelegate( | |
| 186 const VideoCaptureDevice::Name& device_name, | |
| 187 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, | |
| 188 int power_line_frequency) { | |
| 189 switch (device_name.capture_api_type()) { | |
| 190 case VideoCaptureDevice::Name::V4L2_SINGLE_PLANE: | |
| 191 return make_scoped_refptr(new V4L2CaptureDelegateSinglePlane( | |
| 192 device_name, v4l2_task_runner, power_line_frequency)); | |
| 193 case VideoCaptureDevice::Name::V4L2_MULTI_PLANE: | |
| 194 return make_scoped_refptr(new V4L2CaptureDelegateMultiPlane( | |
| 195 device_name, v4l2_task_runner, power_line_frequency)); | |
| 196 default: | |
| 197 NOTIMPLEMENTED() << "Unknown V4L2 capture API type"; | |
| 198 return scoped_refptr<V4L2VideoCaptureDelegate>(); | |
| 199 } | |
| 200 } | |
| 201 | |
| 202 // static | |
| 203 VideoPixelFormat V4L2VideoCaptureDelegate::V4l2FourCcToChromiumPixelFormat( | |
| 204 uint32_t v4l2_fourcc) { | |
| 205 const struct { | |
| 206 uint32_t fourcc; | |
| 207 VideoPixelFormat pixel_format; | |
| 208 } kFourCcAndChromiumPixelFormats[] = { | |
| 209 {V4L2_PIX_FMT_YUV420M, PIXEL_FORMAT_I420}, | |
| 210 {V4L2_PIX_FMT_YUV420, PIXEL_FORMAT_I420}, | |
| 211 {V4L2_PIX_FMT_YUYV, PIXEL_FORMAT_YUY2}, | |
| 212 {V4L2_PIX_FMT_UYVY, PIXEL_FORMAT_UYVY}, | |
| 213 {V4L2_PIX_FMT_MJPEG, PIXEL_FORMAT_MJPEG}, | |
| 214 {V4L2_PIX_FMT_JPEG, PIXEL_FORMAT_MJPEG}, | |
| 215 }; | |
| 216 for (const auto& fourcc_and_pixel_format : kFourCcAndChromiumPixelFormats) { | |
| 217 if (fourcc_and_pixel_format.fourcc == v4l2_fourcc) | |
| 218 return fourcc_and_pixel_format.pixel_format; | |
| 219 } | |
| 220 DVLOG(1) << "Unsupported pixel format: " << FourccToString(v4l2_fourcc); | |
| 221 return PIXEL_FORMAT_UNKNOWN; | |
| 222 } | |
| 223 | |
| 224 // static | |
| 225 std::list<uint32_t> V4L2VideoCaptureDelegate::GetListOfUsableFourCcs( | |
| 226 bool prefer_mjpeg) { | |
| 227 std::list<uint32_t> singleplane_formats = GetListOfUsableFourCcsSinglePlane(); | |
| 228 std::list<uint32_t> multiplane_formats = GetListOfUsableFourCcsMultiPlane(); | |
| 229 multiplane_formats.insert(multiplane_formats.end(), | |
| 230 singleplane_formats.begin(), | |
| 231 singleplane_formats.end()); | |
| 232 // Add MJPEG to the front or the back of the list depending on |prefer_mjpeg|. | |
| 233 if (prefer_mjpeg) | |
| 234 multiplane_formats.insert(multiplane_formats.begin(), V4L2_PIX_FMT_MJPEG); | |
| 235 else | |
| 236 multiplane_formats.insert(multiplane_formats.end(), V4L2_PIX_FMT_MJPEG); | |
| 237 | |
| 238 // JPEG works as MJPEG on some gspca webcams from field reports. | |
| 239 // Put it as the least preferred format. | |
| 240 multiplane_formats.push_back(V4L2_PIX_FMT_JPEG); | |
| 241 | |
| 242 return multiplane_formats; | |
| 243 } | |
| 244 | |
| 245 V4L2VideoCaptureDelegate::BufferTracker::~BufferTracker() { | |
| 246 for (const auto& plane : planes_) { | |
| 247 if (plane.start == NULL) | |
| 248 continue; | |
| 249 const int result = munmap(plane.start, plane.length); | |
| 250 PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2 buffer"; | |
| 251 } | |
| 252 } | |
| 253 | |
// Chooses the V4L2 buffer type (single- vs multi-planar capture) from the API
// type encoded in |device_name|; the remaining members are plain copies of the
// constructor arguments with capture state initialized to "not capturing".
V4L2VideoCaptureDelegate::V4L2VideoCaptureDelegate(
    const VideoCaptureDevice::Name& device_name,
    const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
    int power_line_frequency)
    : capture_type_((device_name.capture_api_type() ==
                     VideoCaptureDevice::Name::V4L2_SINGLE_PLANE)
                        ? V4L2_BUF_TYPE_VIDEO_CAPTURE
                        : V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE),
      v4l2_task_runner_(v4l2_task_runner),
      device_name_(device_name),
      power_line_frequency_(power_line_frequency),
      is_capturing_(false),
      timeout_count_(0),
      rotation_(0) {
}
| 269 | |
// Opens the V4L2 device, negotiates pixel format/size/framerate, requests and
// mmap()s kMaxVideoBuffers driver buffers, starts streaming and kicks off the
// DoCapture() polling loop. On any failure, reports via SetErrorState() and
// returns early. Must run on |v4l2_task_runner_|.
void V4L2VideoCaptureDelegate::AllocateAndStart(
    int width,
    int height,
    float frame_rate,
    scoped_ptr<VideoCaptureDevice::Client> client) {
  DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
  DCHECK(client);
  client_ = client.Pass();

  // Need to open camera with O_RDWR after Linux kernel 3.3.
  device_fd_.reset(HANDLE_EINTR(open(device_name_.id().c_str(), O_RDWR)));
  if (!device_fd_.is_valid()) {
    SetErrorState("Failed to open V4L2 device driver file.");
    return;
  }

  // Reject devices that are not pure capture devices (e.g. output or M2M
  // devices that also expose an output queue).
  v4l2_capability cap = {};
  if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) &&
        ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE ||
          cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) &&
         !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT) &&
         !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)))) {
    device_fd_.reset();
    SetErrorState("This is not a V4L2 video capture device");
    return;
  }

  // Get supported video formats in preferred order.
  // For large resolutions, favour mjpeg over raw formats.
  const std::list<uint32_t>& desired_v4l2_formats =
      GetListOfUsableFourCcs(width > kMjpegWidth || height > kMjpegHeight);
  std::list<uint32_t>::const_iterator best = desired_v4l2_formats.end();

  // Enumerate the formats the driver offers; |best| converges on the
  // most-preferred one (searching only [begin, best) each iteration).
  v4l2_fmtdesc fmtdesc = {};
  fmtdesc.type = capture_type_;
  for (; HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0;
       ++fmtdesc.index) {
    best = std::find(desired_v4l2_formats.begin(), best, fmtdesc.pixelformat);
  }
  if (best == desired_v4l2_formats.end()) {
    SetErrorState("Failed to find a supported camera format.");
    return;
  }

  DVLOG(1) << "Chosen pixel format is " << FourccToString(*best);

  // Delegate S/MPLANE-specific format filling to the derived class.
  v4l2_format video_fmt = {};
  video_fmt.type = capture_type_;
  if (!FillV4L2Format(&video_fmt, width, height, *best)) {
    SetErrorState("Failed filling in V4L2 Format");
    return;
  }

  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt)) < 0) {
    SetErrorState("Failed to set video capture format");
    return;
  }
  const VideoPixelFormat pixel_format =
      V4l2FourCcToChromiumPixelFormat(video_fmt.fmt.pix.pixelformat);
  if (pixel_format == PIXEL_FORMAT_UNKNOWN) {
    SetErrorState("Unsupported pixel format");
    return;
  }

  // Set capture framerate in the form of capture interval.
  v4l2_streamparm streamparm = {};
  streamparm.type = capture_type_;
  // The following line checks that the driver knows about framerate get/set.
  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) {
    // Now check if the device is able to accept a capture framerate set.
    if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
      // |frame_rate| is float, approximate by a fraction.
      streamparm.parm.capture.timeperframe.numerator =
          media::kFrameRatePrecision;
      streamparm.parm.capture.timeperframe.denominator =
          (frame_rate) ? (frame_rate * media::kFrameRatePrecision)
                       : (kTypicalFramerate * media::kFrameRatePrecision);

      if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_PARM, &streamparm)) <
          0) {
        SetErrorState("Failed to set camera framerate");
        return;
      }
      DVLOG(2) << "Actual camera driverframerate: "
               << streamparm.parm.capture.timeperframe.denominator << "/"
               << streamparm.parm.capture.timeperframe.numerator;
    }
  }
  // TODO(mcasas): what should be done if the camera driver does not allow
  // framerate configuration, or the actual one is different from the desired?

  // Set anti-banding/anti-flicker to 50/60Hz. May fail due to not supported
  // operation (|errno| == EINVAL in this case) or plain failure.
  if ((power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_50HZ) ||
      (power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_60HZ)) {
    struct v4l2_control control = {};
    control.id = V4L2_CID_POWER_LINE_FREQUENCY;
    control.value = power_line_frequency_;
    // Best-effort: failure here is deliberately ignored.
    HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &control));
  }

  // Record the format actually granted by the driver (which may differ from
  // the requested |width| x |height|).
  capture_format_.frame_size.SetSize(video_fmt.fmt.pix.width,
                                     video_fmt.fmt.pix.height);
  capture_format_.frame_rate = frame_rate;
  capture_format_.pixel_format = pixel_format;

  // Request MMAP buffers from the driver and map each one into user space.
  v4l2_requestbuffers r_buffer = {};
  r_buffer.type = capture_type_;
  r_buffer.memory = V4L2_MEMORY_MMAP;
  r_buffer.count = kMaxVideoBuffers;
  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) {
    SetErrorState("Error requesting MMAP buffers from V4L2");
    return;
  }
  DCHECK_EQ(r_buffer.count, kMaxVideoBuffers);
  for (unsigned int i = 0; i < r_buffer.count; ++i) {
    if (!AllocateVideoBuffer(i)) {
      SetErrorState("Allocate buffer failed");
      return;
    }
  }

  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type_))
      < 0) {
    SetErrorState("VIDIOC_STREAMON failed");
    return;
  }

  is_capturing_ = true;
  // Post task to start fetching frames from v4l2.
  v4l2_task_runner_->PostTask(
      FROM_HERE, base::Bind(&V4L2VideoCaptureDelegate::DoCapture, this));
}
| 403 | |
// Stops streaming, releases the mmap()ed buffers and closes the device.
// Must run on |v4l2_task_runner_|.
void V4L2VideoCaptureDelegate::StopAndDeAllocate() {
  DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
  // The order is important: stop streaming, clear |buffer_pool_|,
  // thus munmap()ing the v4l2_buffers, and then return them to the OS.
  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &capture_type_))
      < 0) {
    SetErrorState("VIDIOC_STREAMOFF failed");
    return;
  }

  // Dropping the last reference to each BufferTracker munmap()s its planes.
  buffer_tracker_pool_.clear();

  // REQBUFS with count = 0 tells the driver to free its buffers.
  v4l2_requestbuffers r_buffer = {};
  r_buffer.type = capture_type_;
  r_buffer.memory = V4L2_MEMORY_MMAP;
  r_buffer.count = 0;
  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0)
    SetErrorState("Failed to VIDIOC_REQBUFS with count = 0");

  // At this point we can close the device.
  // This is also needed for correctly changing settings later via VIDIOC_S_FMT.
  device_fd_.reset();
  is_capturing_ = false;
  client_.reset();
}
| 429 | |
| 430 void V4L2VideoCaptureDelegate::SetRotation(int rotation) { | |
| 431 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | |
| 432 DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0); | |
| 433 rotation_ = rotation; | |
| 434 } | |
| 435 | |
// Queries driver buffer |index|, mmap()s it via a fresh BufferTracker and
// enqueues it into the driver's incoming queue. Returns false on any failure
// (the partially-created tracker, if any, is released by its refcount).
bool V4L2VideoCaptureDelegate::AllocateVideoBuffer(int index) {
  v4l2_buffer buffer = {};
  FillV4L2Buffer(&buffer, index);

  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) < 0) {
    DLOG(ERROR) << "Error querying status of a MMAP V4L2 buffer";
    return false;
  }

  // CreateBufferTracker() returns the S/MPLANE-specific tracker; Init() does
  // the actual mmap()ing.
  const scoped_refptr<BufferTracker>& buffer_tracker = CreateBufferTracker();
  if (!buffer_tracker->Init(device_fd_.get(), buffer)) {
    DLOG(ERROR) << "Error creating BufferTracker";
    return false;
  }
  buffer_tracker_pool_.push_back(buffer_tracker);

  // Enqueue the buffer in the drivers incoming queue.
  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) {
    DLOG(ERROR) << "Error enqueuing a V4L2 buffer back into the driver";
    return false;
  }
  return true;
}
| 459 | |
| 460 void V4L2VideoCaptureDelegate::FillV4L2Buffer(v4l2_buffer* buffer, | |
| 461 int i) const { | |
| 462 memset(buffer, 0, sizeof(*buffer)); | |
| 463 buffer->memory = V4L2_MEMORY_MMAP; | |
| 464 buffer->index = i; | |
| 465 FinishFillingV4L2Buffer(buffer); | |
| 466 } | |
| 467 | |
// One iteration of the capture loop: poll()s the device for a filled buffer
// (tracking consecutive timeouts), dequeues it, hands it to the client via
// SendBuffer(), re-enqueues it and reposts itself. Stops silently once
// |is_capturing_| is false. Must run on |v4l2_task_runner_|.
void V4L2VideoCaptureDelegate::DoCapture() {
  DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
  if (!is_capturing_)
    return;

  pollfd device_pfd = {};
  device_pfd.fd = device_fd_.get();
  device_pfd.events = POLLIN;
  const int result = HANDLE_EINTR(poll(&device_pfd, 1, kCaptureTimeoutMs));
  if (result < 0) {
    SetErrorState("Poll failed");
    return;
  }
  // Check if poll() timed out; track the amount of times it did in a row and
  // throw an error if it times out too many times.
  if (result == 0) {
    timeout_count_++;
    if (timeout_count_ >= kContinuousTimeoutLimit) {
      SetErrorState("Multiple continuous timeouts while read-polling.");
      timeout_count_ = 0;
      return;
    }
  } else {
    timeout_count_ = 0;
  }

  // Deenqueue, send and reenqueue a buffer if the driver has filled one in.
  if (device_pfd.revents & POLLIN) {
    v4l2_buffer buffer = {};
    buffer.type = capture_type_;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.index = 0;
    FinishFillingV4L2Buffer(&buffer);

    // DQBUF updates |buffer.index| to the dequeued buffer's slot.
    if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) < 0) {
      SetErrorState("Failed to dequeue capture buffer");
      return;
    }

    SendBuffer(buffer_tracker_pool_[buffer.index]);

    if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) {
      SetErrorState("Failed to enqueue capture buffer");
      return;
    }
  }

  // Keep the loop alive by reposting this task.
  v4l2_task_runner_->PostTask(
      FROM_HERE, base::Bind(&V4L2VideoCaptureDelegate::DoCapture, this));
}
| 518 | |
// Flags the delegate as no longer capturing (which also stops the DoCapture()
// loop from reposting) and forwards |reason| to the client. Must run on
// |v4l2_task_runner_|.
void V4L2VideoCaptureDelegate::SetErrorState(const std::string& reason) {
  DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
  is_capturing_ = false;
  client_->OnError(reason);
}
| 524 | |
// Out-of-line destructor; member destructors do all the cleanup.
V4L2VideoCaptureDelegate::~V4L2VideoCaptureDelegate() {
}
| 527 | |
| 528 scoped_refptr<V4L2VideoCaptureDelegate::BufferTracker> | |
| 529 V4L2CaptureDelegateSinglePlane::CreateBufferTracker() { | |
| 530 return make_scoped_refptr(new BufferTrackerSPlane()); | |
| 531 } | |
| 532 | |
| 533 bool V4L2CaptureDelegateSinglePlane::FillV4L2Format( | |
| 534 v4l2_format* format, | |
| 535 uint32_t width, | |
| 536 uint32_t height, | |
| 537 uint32_t pixelformat_fourcc) { | |
| 538 format->fmt.pix.width = width; | |
| 539 format->fmt.pix.height = height; | |
| 540 format->fmt.pix.pixelformat = pixelformat_fourcc; | |
| 541 return true; | |
| 542 } | |
| 543 | |
// Single-plane buffers only need the plain CAPTURE buffer type; no per-plane
// bookkeeping is required.
void V4L2CaptureDelegateSinglePlane::FinishFillingV4L2Buffer(
    v4l2_buffer* buffer) const {
  buffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
}
| 548 | |
| 549 void V4L2CaptureDelegateSinglePlane::SendBuffer( | |
| 550 const scoped_refptr<BufferTracker>& buffer) { | |
| 551 BufferTrackerSPlane* const buffer_tracker = | |
| 552 reinterpret_cast<BufferTrackerSPlane*>(buffer.get()); | |
| 553 client()->OnIncomingCapturedData( | |
| 554 buffer_tracker->GetPlaneStart(0), | |
| 555 buffer_tracker->GetPlaneLength(0), | |
| 556 capture_format(), | |
| 557 rotation(), | |
| 558 base::TimeTicks::Now()); | |
| 559 } | |
| 560 | |
// Maps the single driver buffer described by |buffer| into user space and
// records it as this tracker's one plane. Returns false if mmap() fails.
bool V4L2CaptureDelegateSinglePlane::BufferTrackerSPlane::Init(
    int fd,
    const v4l2_buffer& buffer) {
  Plane plane;
  // Some devices require mmap() to be called with both READ and WRITE.
  // See http://crbug.com/178582.
  plane.start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED,
                     fd, buffer.m.offset);
  if (plane.start == MAP_FAILED) {
    DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace";
    return false;
  }
  plane.length = buffer.length;
  planes().push_back(plane);
  return true;
}
| 577 | |
| 578 scoped_refptr<V4L2VideoCaptureDelegate::BufferTracker> | |
| 579 V4L2CaptureDelegateMultiPlane::CreateBufferTracker() { | |
| 580 return make_scoped_refptr(new BufferTrackerMPlane()); | |
| 581 } | |
| 582 | |
| 583 bool V4L2CaptureDelegateMultiPlane::FillV4L2Format( | |
| 584 v4l2_format* format, | |
| 585 uint32_t width, | |
| 586 uint32_t height, | |
| 587 uint32_t pixelformat_fourcc) { | |
| 588 format->fmt.pix_mp.width = width; | |
| 589 format->fmt.pix_mp.height = height; | |
| 590 | |
| 591 fourcc_ = pixelformat_fourcc; | |
| 592 format->fmt.pix_mp.pixelformat = fourcc_; | |
| 593 | |
| 594 num_planes_ = GetNumPlanesForFourCc(fourcc_); | |
| 595 if (num_planes_ == 0u) | |
| 596 return false; | |
| 597 DCHECK_LE(num_planes_, static_cast<unsigned long>(VIDEO_MAX_PLANES)); | |
| 598 format->fmt.pix_mp.num_planes = num_planes_; | |
| 599 | |
| 600 v4l2_plane_.reset(new v4l2_plane[num_planes_]); | |
| 601 return true; | |
| 602 } | |
| 603 | |
| 604 void V4L2CaptureDelegateMultiPlane::FinishFillingV4L2Buffer( | |
| 605 v4l2_buffer* buffer) const { | |
| 606 buffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
| 607 buffer->length = num_planes_; | |
| 608 buffer->m.planes = v4l2_plane_.get(); | |
| 609 } | |
| 610 | |
| 611 void V4L2CaptureDelegateMultiPlane::SendBuffer( | |
| 612 const scoped_refptr<BufferTracker>& buffer) { | |
| 613 DCHECK_EQ(capture_format().pixel_format, PIXEL_FORMAT_I420); | |
| 614 | |
| 615 BufferTrackerMPlane* const buffer_tracker = | |
| 616 reinterpret_cast<BufferTrackerMPlane*>(buffer.get()); | |
| 617 client()->OnIncomingCapturedYuvData( | |
| 618 buffer_tracker->GetPlaneStart(0), | |
| 619 buffer_tracker->GetPlaneStart(1), | |
| 620 buffer_tracker->GetPlaneStart(2), | |
| 621 buffer_tracker->GetPlaneLength(0), | |
| 622 buffer_tracker->GetPlaneLength(1), | |
| 623 buffer_tracker->GetPlaneLength(2), | |
| 624 capture_format(), | |
| 625 rotation(), | |
| 626 base::TimeTicks::Now()); | |
| 627 } | |
| 628 | |
| 629 bool V4L2CaptureDelegateMultiPlane::BufferTrackerMPlane::Init( | |
|
magjed_chromium
2015/03/10 16:32:48
This function is very similar to V4L2CaptureDelega
mcasas
2015/03/10 20:10:32
It is indeed similar. IMHO the differences are
la
| |
| 630 int fd, | |
| 631 const v4l2_buffer& buffer) { | |
| 632 for (size_t p = 0; p < buffer.length; ++p) { | |
| 633 Plane plane; | |
| 634 plane.start = mmap(NULL, buffer.m.planes[p].length, PROT_READ | PROT_WRITE, | |
| 635 MAP_SHARED, fd, buffer.m.planes[p].m.mem_offset); | |
| 636 if (plane.start == MAP_FAILED) { | |
| 637 DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace"; | |
| 638 plane.start = nullptr; | |
| 639 return false; | |
| 640 } | |
| 641 plane.length = buffer.m.planes[p].length; | |
| 642 DVLOG(3) << "Mmap()ed plane #" << p << ", length " << plane.length << "B"; | |
| 643 planes().push_back(plane); | |
| 644 } | |
| 645 return true; | |
| 646 } | |
| 647 | |
| 648 } // namespace media | |
| OLD | NEW |