OLD | NEW |
(Empty) | |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "media/video/capture/linux/v4l2_video_capture_delegate.h" |
| 6 |
| 7 #include <poll.h> |
| 8 #include <sys/fcntl.h> |
| 9 #include <sys/ioctl.h> |
| 10 #include <sys/mman.h> |
| 11
| #include <algorithm>
|
| 12 #include "base/bind.h" |
| 13 #include "base/files/file_enumerator.h" |
| 14 #include "base/posix/eintr_wrapper.h" |
| 15 #include "base/strings/stringprintf.h" |
| 16 #include "media/base/bind_to_current_loop.h" |
| 17 #include "media/video/capture/linux/v4l2_capture_delegate_multi_plane.h" |
| 18 #include "media/video/capture/linux/v4l2_capture_delegate_single_plane.h" |
| 19 #include "media/video/capture/linux/video_capture_device_linux.h" |
| 20 |
| 21 namespace media { |
| 22 |
| 23 // Desired number of video buffers to allocate. The actual number of buffers
| 24 // allocated by the V4L2 driver can be higher or lower than this number.
| 25 // kNumVideoBuffers should not be too small, or Chrome may not return enough
| 26 // buffers back to the driver in time.
| 27 const uint32 kNumVideoBuffers = 4; |
| 28 // Timeout in milliseconds that the capture thread blocks in poll() waiting for a frame from the hardware.
| 29 const int kCaptureTimeoutMs = 200; |
| 30 // The number of consecutive poll() timeouts tolerated before reporting an error.
| 31 const int kContinuousTimeoutLimit = 10; |
| 32 // MJPEG is preferred if the requested width or height is larger than this. |
| 33 const int kMjpegWidth = 640; |
| 34 const int kMjpegHeight = 480; |
| 35 // Typical framerate, in fps; used as a fallback when the requested frame rate is zero.
| 36 const int kTypicalFramerate = 30; |
| 37 |
| 38 // V4L2 color formats supported by V4L2VideoCaptureDelegate derived classes. |
| 39 // This list is ordered by precedence of use -- but see caveats for MJPEG. |
| 40 static struct {
| 41 uint32_t fourcc; |
| 42 VideoPixelFormat pixel_format; |
| 43 size_t num_planes; |
| 44 } const kSupportedFormatsAndPlanarity[] = { |
| 45 {V4L2_PIX_FMT_YUV420, PIXEL_FORMAT_I420, 1}, |
| 46 {V4L2_PIX_FMT_YUYV, PIXEL_FORMAT_YUY2, 1}, |
| 47 {V4L2_PIX_FMT_UYVY, PIXEL_FORMAT_UYVY, 1}, |
| 48 #if !defined(OS_OPENBSD) |
| 49 // TODO(mcasas): add V4L2_PIX_FMT_YVU420M when available in bots. |
| 50 {V4L2_PIX_FMT_YUV420M, PIXEL_FORMAT_I420, 3}, |
| 51 #endif |
| 52 // MJPEG sits fairly low in the list since we prefer to avoid decoding it.
| 53 // However, it is needed for large resolutions due to USB bandwidth
| 54 // limitations, so GetListOfUsableFourCcs() can duplicate it at the front; see that method.
| 55 {V4L2_PIX_FMT_MJPEG, PIXEL_FORMAT_MJPEG, 1}, |
| 56 // Field reports indicate that JPEG works as MJPEG on some gspca webcams; see
| 57 // https://code.google.com/p/webrtc/issues/detail?id=529. Keep it as the least
| 58 // preferred format.
| 59 {V4L2_PIX_FMT_JPEG, PIXEL_FORMAT_MJPEG, 1}, |
| 60 }; |
| 61 |
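| // Chooses between the single-plane and the multi-plane delegate implementation
| // based on the capture API type carried in |device_name|.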
| 62 // static |
| 63 scoped_refptr<V4L2VideoCaptureDelegate> |
| 64 V4L2VideoCaptureDelegate::CreateV4L2VideoCaptureDelegate( |
| 65 const VideoCaptureDevice::Name& device_name, |
| 66 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, |
| 67 int power_line_frequency) { |
| 68 switch (device_name.capture_api_type()) { |
| 69 case VideoCaptureDevice::Name::V4L2_SINGLE_PLANE: |
| 70 return make_scoped_refptr(new V4L2CaptureDelegateSinglePlane( |
| 71 device_name, v4l2_task_runner, power_line_frequency)); |
| 72 case VideoCaptureDevice::Name::V4L2_MULTI_PLANE: |
| 73 #if !defined(OS_OPENBSD) |
| 74 return make_scoped_refptr(new V4L2CaptureDelegateMultiPlane( |
| 75 device_name, v4l2_task_runner, power_line_frequency)); |
| 76 default: |
| 77 #endif |
| 78 NOTIMPLEMENTED() << "Unknown V4L2 capture API type"; |
| 79 return scoped_refptr<V4L2VideoCaptureDelegate>(); |
| 80 } |
| 81 } |
| 82 |
| 83 // static
| 84 size_t V4L2VideoCaptureDelegate::GetNumPlanesForFourCc(uint32_t fourcc) { |
| 85 for (const auto& fourcc_and_pixel_format : kSupportedFormatsAndPlanarity) { |
| 86 if (fourcc_and_pixel_format.fourcc == fourcc) |
| 87 return fourcc_and_pixel_format.num_planes; |
| 88 } |
| 89 DVLOG(1) << "Unknown fourcc " << FourccToString(fourcc); |
| 90 return 0; |
| 91 } |
| 92 |
| 93 // static |
| 94 VideoPixelFormat V4L2VideoCaptureDelegate::V4l2FourCcToChromiumPixelFormat( |
| 95 uint32_t v4l2_fourcc) { |
| 96 for (const auto& fourcc_and_pixel_format : kSupportedFormatsAndPlanarity) { |
| 97 if (fourcc_and_pixel_format.fourcc == v4l2_fourcc) |
| 98 return fourcc_and_pixel_format.pixel_format; |
| 99 } |
| 100 // Not finding a pixel format is OK during device capabilities enumeration. |
| 101 // Let the caller decide if PIXEL_FORMAT_UNKNOWN is an error or not. |
| 102 DVLOG(1) << "Unsupported pixel format: " << FourccToString(v4l2_fourcc); |
| 103 return PIXEL_FORMAT_UNKNOWN; |
| 104 } |
| 105 |
| 106 // static |
| 107 std::list<uint32_t> V4L2VideoCaptureDelegate::GetListOfUsableFourCcs( |
| 108 bool prefer_mjpeg) { |
| 109 std::list<uint32_t> supported_formats; |
| 110 for (const auto& format : kSupportedFormatsAndPlanarity) |
| 111 supported_formats.push_back(format.fourcc); |
| 112 |
| 113 // If |prefer_mjpeg| is set, duplicate MJPEG at the front of the list.
| 114 if (prefer_mjpeg) |
| 115 supported_formats.push_front(V4L2_PIX_FMT_MJPEG); |
| 116 |
| 117 return supported_formats; |
| 118 } |
| 119 |
| 120 // static
| 121 std::string V4L2VideoCaptureDelegate::FourccToString(uint32_t fourcc) { |
| 122 return base::StringPrintf("%c%c%c%c", fourcc & 0xFF, (fourcc >> 8) & 0xFF, |
| 123 (fourcc >> 16) & 0xFF, (fourcc >> 24) & 0xFF); |
| 124 } |
| 125 |
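| // BufferTracker keeps the memory-mapped planes of one V4L2 buffer and
| // munmap()s them when it is destroyed.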
| 126 V4L2VideoCaptureDelegate::BufferTracker::BufferTracker() { |
| 127 } |
| 128 |
| 129 V4L2VideoCaptureDelegate::BufferTracker::~BufferTracker() { |
| 130 for (const auto& plane : planes_) { |
| 131 if (plane.start == nullptr) |
| 132 continue; |
| 133 const int result = munmap(plane.start, plane.length); |
| 134 PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2 buffer"; |
| 135 } |
| 136 } |
| 137 |
| 138 void V4L2VideoCaptureDelegate::BufferTracker::AddMmapedPlane( |
| 139 uint8_t* const start, |
| 140 size_t length) { |
| 141 Plane plane; |
| 142 plane.start = start; |
| 143 plane.length = length; |
| 144 planes_.push_back(plane); |
| 145 } |
| 146 |
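| // |capture_type_| selects the V4L2 buffer type used in every ioctl, depending
| // on whether the device is driven through the single-plane or the multi-plane API.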
| 147 V4L2VideoCaptureDelegate::V4L2VideoCaptureDelegate( |
| 148 const VideoCaptureDevice::Name& device_name, |
| 149 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, |
| 150 int power_line_frequency) |
| 151 : capture_type_((device_name.capture_api_type() == |
| 152 VideoCaptureDevice::Name::V4L2_SINGLE_PLANE) |
| 153 ? V4L2_BUF_TYPE_VIDEO_CAPTURE |
| 154 : V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE), |
| 155 v4l2_task_runner_(v4l2_task_runner), |
| 156 device_name_(device_name), |
| 157 power_line_frequency_(power_line_frequency), |
| 158 is_capturing_(false), |
| 159 timeout_count_(0), |
| 160 rotation_(0) { |
| 161 } |
| 162 |
| 163 V4L2VideoCaptureDelegate::~V4L2VideoCaptureDelegate() { |
| 164 } |
| 165 |
| 166 void V4L2VideoCaptureDelegate::AllocateAndStart( |
| 167 int width, |
| 168 int height, |
| 169 float frame_rate, |
| 170 scoped_ptr<VideoCaptureDevice::Client> client) { |
| 171 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
| 172 DCHECK(client); |
| 173 client_ = client.Pass(); |
| 174 |
| 175 // The camera must be opened with O_RDWR on Linux kernel 3.3 and later.
| 176 device_fd_.reset(HANDLE_EINTR(open(device_name_.id().c_str(), O_RDWR))); |
| 177 if (!device_fd_.is_valid()) { |
| 178 SetErrorState("Failed to open V4L2 device driver file."); |
| 179 return; |
| 180 } |
| 181 |
| 182 v4l2_capability cap = {}; |
| 183 if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) && |
| 184 ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE || |
| 185 cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) && |
| 186 !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT) && |
| 187 !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)))) { |
| 188 device_fd_.reset(); |
| 189 SetErrorState("This is not a V4L2 video capture device"); |
| 190 return; |
| 191 } |
| 192 |
| 193 // Get supported video formats in preferred order. |
| 194 // For large resolutions, favour mjpeg over raw formats. |
| 195 const std::list<uint32_t>& desired_v4l2_formats = |
| 196 GetListOfUsableFourCcs(width > kMjpegWidth || height > kMjpegHeight); |
| 197 std::list<uint32_t>::const_iterator best = desired_v4l2_formats.end(); |
| 198 |
| 199 v4l2_fmtdesc fmtdesc = {}; |
| 200 fmtdesc.type = capture_type_; |
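| // Enumerate all formats the driver offers and remember the one that appears
| // earliest in |desired_v4l2_formats|; std::find() only searches up to the
| // current |best|, so |best| can only move towards more preferred formats.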
| 201 for (; HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0; |
| 202 ++fmtdesc.index) { |
| 203 best = std::find(desired_v4l2_formats.begin(), best, fmtdesc.pixelformat); |
| 204 } |
| 205 if (best == desired_v4l2_formats.end()) { |
| 206 SetErrorState("Failed to find a supported camera format."); |
| 207 return; |
| 208 } |
| 209 |
| 210 DVLOG(1) << "Chosen pixel format is " << FourccToString(*best); |
| 211 |
| 212 v4l2_format video_fmt = {}; |
| 213 video_fmt.type = capture_type_; |
| 214 if (!FillV4L2Format(&video_fmt, width, height, *best)) { |
| 215 SetErrorState("Failed filling in V4L2 Format"); |
| 216 return; |
| 217 } |
| 218 |
| 219 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt)) < 0) { |
| 220 SetErrorState("Failed to set video capture format"); |
| 221 return; |
| 222 } |
| 223 const VideoPixelFormat pixel_format = |
| 224 V4l2FourCcToChromiumPixelFormat(video_fmt.fmt.pix.pixelformat); |
| 225 if (pixel_format == PIXEL_FORMAT_UNKNOWN) { |
| 226 SetErrorState("Unsupported pixel format"); |
| 227 return; |
| 228 } |
| 229 |
| 230 // Set the capture framerate, expressed as a capture interval (time per frame).
| 231 v4l2_streamparm streamparm = {}; |
| 232 streamparm.type = capture_type_; |
| 233 // The following line checks that the driver knows about framerate get/set. |
| 234 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) { |
| 235 // Now check if the device is able to accept a capture framerate set. |
| 236 if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { |
| 237 // |frame_rate| is float, approximate by a fraction. |
| 238 streamparm.parm.capture.timeperframe.numerator = |
| 239 media::kFrameRatePrecision; |
| 240 streamparm.parm.capture.timeperframe.denominator = |
| 241 (frame_rate) ? (frame_rate * media::kFrameRatePrecision) |
| 242 : (kTypicalFramerate * media::kFrameRatePrecision); |
| 243 |
| 244 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_PARM, &streamparm)) < |
| 245 0) { |
| 246 SetErrorState("Failed to set camera framerate"); |
| 247 return; |
| 248 } |
| 249 DVLOG(2) << "Actual camera driver framerate: "
| 250 << streamparm.parm.capture.timeperframe.denominator << "/" |
| 251 << streamparm.parm.capture.timeperframe.numerator; |
| 252 } |
| 253 } |
| 254 // TODO(mcasas): decide what to do if the camera driver does not allow
| 255 // configuring the framerate, or if the actual framerate differs from the desired one.
| 256 |
| 257 // Set anti-banding/anti-flicker to 50/60 Hz. This may fail because the
| 258 // operation is not supported (|errno| == EINVAL in that case) or due to a plain failure.
| 259 if ((power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_50HZ) || |
| 260 (power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_60HZ) || |
| 261 (power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_AUTO)) { |
| 262 struct v4l2_control control = {}; |
| 263 control.id = V4L2_CID_POWER_LINE_FREQUENCY; |
| 264 control.value = power_line_frequency_; |
| 265 const int retval = |
| 266 HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &control)); |
| 267 if (retval != 0) |
| 268 DVLOG(1) << "Error setting power line frequency removal"; |
| 269 } |
| 270 |
| 271 capture_format_.frame_size.SetSize(video_fmt.fmt.pix.width, |
| 272 video_fmt.fmt.pix.height); |
| 273 capture_format_.frame_rate = frame_rate; |
| 274 capture_format_.pixel_format = pixel_format; |
| 275 |
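| // Request |kNumVideoBuffers| MMAP buffers from the driver, then map and
| // enqueue each returned buffer so capture can start.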
| 276 v4l2_requestbuffers r_buffer = {}; |
| 277 r_buffer.type = capture_type_; |
| 278 r_buffer.memory = V4L2_MEMORY_MMAP; |
| 279 r_buffer.count = kNumVideoBuffers; |
| 280 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) { |
| 281 SetErrorState("Error requesting MMAP buffers from V4L2"); |
| 282 return; |
| 283 } |
| 284 DVLOG_IF(1, r_buffer.count != kNumVideoBuffers) << "Driver adjusted the buffer count to " << r_buffer.count;
| 285 for (unsigned int i = 0; i < r_buffer.count; ++i) { |
| 286 if (!MapAndQueueBuffer(i)) { |
| 287 SetErrorState("Allocate buffer failed"); |
| 288 return; |
| 289 } |
| 290 } |
| 291 |
| 292 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type_)) |
| 293 < 0) { |
| 294 SetErrorState("VIDIOC_STREAMON failed"); |
| 295 return; |
| 296 } |
| 297 |
| 298 is_capturing_ = true; |
| 299 // Post task to start fetching frames from v4l2. |
| 300 v4l2_task_runner_->PostTask( |
| 301 FROM_HERE, base::Bind(&V4L2VideoCaptureDelegate::DoCapture, this)); |
| 302 } |
| 303 |
| 304 void V4L2VideoCaptureDelegate::StopAndDeAllocate() { |
| 305 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
| 306 // The order is important: stop streaming, clear |buffer_tracker_pool_| (which
| 307 // munmap()s the v4l2 buffers), and then release the buffers back to the driver.
| 308 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &capture_type_)) |
| 309 < 0) { |
| 310 SetErrorState("VIDIOC_STREAMOFF failed"); |
| 311 return; |
| 312 } |
| 313 |
| 314 buffer_tracker_pool_.clear(); |
| 315 |
| 316 v4l2_requestbuffers r_buffer = {}; |
| 317 r_buffer.type = capture_type_; |
| 318 r_buffer.memory = V4L2_MEMORY_MMAP; |
| 319 r_buffer.count = 0; |
| 320 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) |
| 321 SetErrorState("Failed to VIDIOC_REQBUFS with count = 0"); |
| 322 |
| 323 // At this point we can close the device. |
| 324 // This is also needed for correctly changing settings later via VIDIOC_S_FMT. |
| 325 device_fd_.reset(); |
| 326 is_capturing_ = false; |
| 327 client_.reset(); |
| 328 } |
| 329 |
| 330 void V4L2VideoCaptureDelegate::SetRotation(int rotation) { |
| 331 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
| 332 DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0); |
| 333 rotation_ = rotation; |
| 334 } |
| 335 |
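| // Queries the status of the buffer at |index|, wraps it in a BufferTracker and
| // enqueues it into the driver's incoming queue.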
| 336 bool V4L2VideoCaptureDelegate::MapAndQueueBuffer(int index) { |
| 337 v4l2_buffer buffer; |
| 338 FillV4L2Buffer(&buffer, index); |
| 339 |
| 340 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) < 0) { |
| 341 DLOG(ERROR) << "Error querying status of a MMAP V4L2 buffer"; |
| 342 return false; |
| 343 } |
| 344 |
| 345 const scoped_refptr<BufferTracker>& buffer_tracker = CreateBufferTracker(); |
| 346 if (!buffer_tracker->Init(device_fd_.get(), buffer)) { |
| 347 DLOG(ERROR) << "Error creating BufferTracker"; |
| 348 return false; |
| 349 } |
| 350 buffer_tracker_pool_.push_back(buffer_tracker); |
| 351 |
| 352 // Enqueue the buffer in the driver's incoming queue.
| 353 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { |
| 354 DLOG(ERROR) << "Error enqueuing a V4L2 buffer back into the driver"; |
| 355 return false; |
| 356 } |
| 357 return true; |
| 358 } |
| 359 |
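| // Zeroes |buffer| and sets the fields common to both planarities; the remaining
| // fields are filled in by FinishFillingV4L2Buffer().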
| 360 void V4L2VideoCaptureDelegate::FillV4L2Buffer(v4l2_buffer* buffer, |
| 361 int i) const { |
| 362 memset(buffer, 0, sizeof(*buffer)); |
| 363 buffer->memory = V4L2_MEMORY_MMAP; |
| 364 buffer->index = i; |
| 365 FinishFillingV4L2Buffer(buffer); |
| 366 } |
| 367 |
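| // One iteration of the capture loop: poll() the device with a timeout, dequeue
| // a filled buffer if one is ready, send it to the client, re-enqueue it, and
| // post another DoCapture() task while capture is still active.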
| 368 void V4L2VideoCaptureDelegate::DoCapture() { |
| 369 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
| 370 if (!is_capturing_) |
| 371 return; |
| 372 |
| 373 pollfd device_pfd = {}; |
| 374 device_pfd.fd = device_fd_.get(); |
| 375 device_pfd.events = POLLIN; |
| 376 const int result = HANDLE_EINTR(poll(&device_pfd, 1, kCaptureTimeoutMs)); |
| 377 if (result < 0) { |
| 378 SetErrorState("Poll failed"); |
| 379 return; |
| 380 } |
| 381 // Check if poll() timed out; track how many times in a row it did, and
| 382 // report an error if it times out too many consecutive times.
| 383 if (result == 0) { |
| 384 timeout_count_++; |
| 385 if (timeout_count_ >= kContinuousTimeoutLimit) { |
| 386 SetErrorState("Multiple continuous timeouts while read-polling."); |
| 387 timeout_count_ = 0; |
| 388 return; |
| 389 } |
| 390 } else { |
| 391 timeout_count_ = 0; |
| 392 } |
| 393 |
| 394 // Dequeue, send, and re-enqueue a buffer if the driver has filled one in.
| 395 if (device_pfd.revents & POLLIN) { |
| 396 v4l2_buffer buffer; |
| 397 FillV4L2Buffer(&buffer, 0); |
| 398 |
| 399 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) < 0) { |
| 400 SetErrorState("Failed to dequeue capture buffer"); |
| 401 return; |
| 402 } |
| 403 |
| 404 SendBuffer(buffer_tracker_pool_[buffer.index]); |
| 405 |
| 406 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { |
| 407 SetErrorState("Failed to enqueue capture buffer"); |
| 408 return; |
| 409 } |
| 410 } |
| 411 |
| 412 v4l2_task_runner_->PostTask( |
| 413 FROM_HERE, base::Bind(&V4L2VideoCaptureDelegate::DoCapture, this)); |
| 414 } |
| 415 |
| 416 void V4L2VideoCaptureDelegate::SetErrorState(const std::string& reason) { |
| 417 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
| 418 is_capturing_ = false; |
| 419 client_->OnError(reason); |
| 420 } |
| 421 |
| 422 } // namespace media |