OLD | NEW |
| (Empty) |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "media/video/capture/linux/v4l2_capture_delegate.h" | |
6 | |
7 #include <poll.h> | |
8 #include <sys/fcntl.h> | |
9 #include <sys/ioctl.h> | |
10 #include <sys/mman.h> | |
11 | |
12 #include "base/bind.h" | |
13 #include "base/files/file_enumerator.h" | |
14 #include "base/posix/eintr_wrapper.h" | |
15 #include "base/strings/stringprintf.h" | |
16 #include "media/base/bind_to_current_loop.h" | |
17 #include "media/video/capture/linux/v4l2_capture_delegate_multi_plane.h" | |
18 #include "media/video/capture/linux/v4l2_capture_delegate_single_plane.h" | |
19 #include "media/video/capture/linux/video_capture_device_linux.h" | |
20 | |
21 namespace media { | |
22 | |
23 // Desired number of video buffers to allocate. The actual number of allocated | |
24 // buffers by v4l2 driver can be higher or lower than this number. | |
25 // kNumVideoBuffers should not be too small, or Chrome may not return enough | |
26 // buffers back to driver in time. | |
27 const uint32 kNumVideoBuffers = 4; | |
28 // Timeout in milliseconds v4l2_thread_ blocks waiting for a frame from the hw. | |
29 const int kCaptureTimeoutMs = 200; | |
30 // The number of continuous timeouts tolerated before treated as error. | |
31 const int kContinuousTimeoutLimit = 10; | |
32 // MJPEG is preferred if the requested width or height is larger than this. | |
33 const int kMjpegWidth = 640; | |
34 const int kMjpegHeight = 480; | |
35 // Typical framerate, in fps | |
36 const int kTypicalFramerate = 30; | |
37 | |
// V4L2 color formats supported by V4L2CaptureDelegate derived classes.
// This list is ordered by precedence of use -- but see caveats for MJPEG.
// Each entry maps a V4L2 fourcc to its Chromium pixel format and the number
// of memory planes it uses (1 => single-plane API, >1 => multi-plane API).
static struct{
  uint32_t fourcc;
  VideoPixelFormat pixel_format;
  size_t num_planes;
} const kSupportedFormatsAndPlanarity[] = {
    {V4L2_PIX_FMT_YUV420, PIXEL_FORMAT_I420, 1},
    {V4L2_PIX_FMT_YUYV, PIXEL_FORMAT_YUY2, 1},
    {V4L2_PIX_FMT_UYVY, PIXEL_FORMAT_UYVY, 1},
#if !defined(OS_OPENBSD)
    // TODO(mcasas): add V4L2_PIX_FMT_YVU420M when available in bots.
    {V4L2_PIX_FMT_YUV420M, PIXEL_FORMAT_I420, 3},
#endif
    // MJPEG is usually sitting fairly low since we don't want to have to
    // decode. However, is needed for large resolutions due to USB bandwidth
    // limitations, so GetListOfUsableFourCcs() can duplicate it on top, see
    // that method.
    {V4L2_PIX_FMT_MJPEG, PIXEL_FORMAT_MJPEG, 1},
    // JPEG works as MJPEG on some gspca webcams from field reports, see
    // https://code.google.com/p/webrtc/issues/detail?id=529, put it as the
    // least preferred format.
    {V4L2_PIX_FMT_JPEG, PIXEL_FORMAT_MJPEG, 1},
};
61 | |
// static
// Factory: instantiates the single- or multi-plane delegate matching the
// capture API type recorded in |device_name|. Returns a null scoped_refptr
// for unknown (or unsupported-on-this-OS) API types.
scoped_refptr<V4L2CaptureDelegate>
V4L2CaptureDelegate::CreateV4L2CaptureDelegate(
    const VideoCaptureDevice::Name& device_name,
    const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
    int power_line_frequency) {
  switch (device_name.capture_api_type()) {
    case VideoCaptureDevice::Name::V4L2_SINGLE_PLANE:
      return make_scoped_refptr(new V4L2CaptureDelegateSinglePlane(
          device_name, v4l2_task_runner, power_line_frequency));
    case VideoCaptureDevice::Name::V4L2_MULTI_PLANE:
#if !defined(OS_OPENBSD)
      return make_scoped_refptr(new V4L2CaptureDelegateMultiPlane(
          device_name, v4l2_task_runner, power_line_frequency));
    default:
#endif
      // Note: on OpenBSD the |default| label above is compiled out, so the
      // V4L2_MULTI_PLANE case falls through to this error path (the
      // multi-plane API is not available there).
      NOTIMPLEMENTED() << "Unknown V4L2 capture API type";
      return scoped_refptr<V4L2CaptureDelegate>();
  }
}
82 | |
83 //static | |
84 size_t V4L2CaptureDelegate::GetNumPlanesForFourCc(uint32_t fourcc) { | |
85 for (const auto& fourcc_and_pixel_format : kSupportedFormatsAndPlanarity) { | |
86 if (fourcc_and_pixel_format.fourcc == fourcc) | |
87 return fourcc_and_pixel_format.num_planes; | |
88 } | |
89 DVLOG(1) << "Unknown fourcc " << FourccToString(fourcc); | |
90 return 0; | |
91 } | |
92 | |
93 // static | |
94 VideoPixelFormat V4L2CaptureDelegate::V4l2FourCcToChromiumPixelFormat( | |
95 uint32_t v4l2_fourcc) { | |
96 for (const auto& fourcc_and_pixel_format : kSupportedFormatsAndPlanarity) { | |
97 if (fourcc_and_pixel_format.fourcc == v4l2_fourcc) | |
98 return fourcc_and_pixel_format.pixel_format; | |
99 } | |
100 // Not finding a pixel format is OK during device capabilities enumeration. | |
101 // Let the caller decide if PIXEL_FORMAT_UNKNOWN is an error or not. | |
102 DVLOG(1) << "Unsupported pixel format: " << FourccToString(v4l2_fourcc); | |
103 return PIXEL_FORMAT_UNKNOWN; | |
104 } | |
105 | |
106 // static | |
107 std::list<uint32_t> V4L2CaptureDelegate::GetListOfUsableFourCcs( | |
108 bool prefer_mjpeg) { | |
109 std::list<uint32_t> supported_formats; | |
110 for (const auto& format : kSupportedFormatsAndPlanarity) | |
111 supported_formats.push_back(format.fourcc); | |
112 | |
113 // Duplicate MJPEG on top of the list depending on |prefer_mjpeg|. | |
114 if (prefer_mjpeg) | |
115 supported_formats.push_front(V4L2_PIX_FMT_MJPEG); | |
116 | |
117 return supported_formats; | |
118 } | |
119 | |
120 //static | |
121 std::string V4L2CaptureDelegate::FourccToString(uint32_t fourcc) { | |
122 return base::StringPrintf("%c%c%c%c", fourcc & 0xFF, (fourcc >> 8) & 0xFF, | |
123 (fourcc >> 16) & 0xFF, (fourcc >> 24) & 0xFF); | |
124 } | |
125 | |
// A freshly constructed tracker owns no planes until AddMmapedPlane() is
// called; the destructor munmap()s whatever was registered.
V4L2CaptureDelegate::BufferTracker::BufferTracker() {
}
128 | |
129 V4L2CaptureDelegate::BufferTracker::~BufferTracker() { | |
130 for (const auto& plane : planes_) { | |
131 if (plane.start == nullptr) | |
132 continue; | |
133 const int result = munmap(plane.start, plane.length); | |
134 PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2 buffer"; | |
135 } | |
136 } | |
137 | |
138 void V4L2CaptureDelegate::BufferTracker::AddMmapedPlane(uint8_t* const start, | |
139 size_t length) { | |
140 Plane plane; | |
141 plane.start = start; | |
142 plane.length = length; | |
143 planes_.push_back(plane); | |
144 } | |
145 | |
V4L2CaptureDelegate::V4L2CaptureDelegate(
    const VideoCaptureDevice::Name& device_name,
    const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
    int power_line_frequency)
    // Single-plane devices use the classic V4L2 capture buffer type; any
    // other capture API type is treated as multi-plane.
    : capture_type_((device_name.capture_api_type() ==
                     VideoCaptureDevice::Name::V4L2_SINGLE_PLANE)
                        ? V4L2_BUF_TYPE_VIDEO_CAPTURE
                        : V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE),
      v4l2_task_runner_(v4l2_task_runner),
      device_name_(device_name),
      power_line_frequency_(power_line_frequency),
      is_capturing_(false),
      timeout_count_(0),
      rotation_(0) {
}
161 | |
// Cleanup of the device fd and buffers happens in StopAndDeAllocate(), which
// callers are expected to run on |v4l2_task_runner_| before destruction.
V4L2CaptureDelegate::~V4L2CaptureDelegate() {
}
164 | |
// Opens the V4L2 device, negotiates pixel format/resolution/framerate,
// mmap()s and queues the capture buffers, starts streaming and kicks off the
// DoCapture() polling loop. On any failure, reports the error via
// SetErrorState() and returns early.
void V4L2CaptureDelegate::AllocateAndStart(
    int width,
    int height,
    float frame_rate,
    scoped_ptr<VideoCaptureDevice::Client> client) {
  DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
  DCHECK(client);
  client_ = client.Pass();

  // Need to open camera with O_RDWR after Linux kernel 3.3.
  device_fd_.reset(HANDLE_EINTR(open(device_name_.id().c_str(), O_RDWR)));
  if (!device_fd_.is_valid()) {
    SetErrorState("Failed to open V4L2 device driver file.");
    return;
  }

  // Reject nodes that do not advertise a (single- or multi-plane) capture
  // capability, or that are also video output devices.
  v4l2_capability cap = {};
  if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) &&
        ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE ||
          cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) &&
         !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT) &&
         !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)))) {
    device_fd_.reset();
    SetErrorState("This is not a V4L2 video capture device");
    return;
  }

  // Get supported video formats in preferred order.
  // For large resolutions, favour mjpeg over raw formats.
  const std::list<uint32_t>& desired_v4l2_formats =
      GetListOfUsableFourCcs(width > kMjpegWidth || height > kMjpegHeight);
  std::list<uint32_t>::const_iterator best = desired_v4l2_formats.end();

  // Enumerate the formats the driver supports and keep the one appearing
  // earliest in |desired_v4l2_formats|: std::find() only searches up to the
  // current |best|, so |best| can only move towards the front of the list.
  v4l2_fmtdesc fmtdesc = {};
  fmtdesc.type = capture_type_;
  for (; HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0;
       ++fmtdesc.index) {
    best = std::find(desired_v4l2_formats.begin(), best, fmtdesc.pixelformat);
  }
  if (best == desired_v4l2_formats.end()) {
    SetErrorState("Failed to find a supported camera format.");
    return;
  }

  DVLOG(1) << "Chosen pixel format is " << FourccToString(*best);

  // FillV4L2Format() is implemented by the single-/multi-plane subclass.
  video_fmt_.type = capture_type_;
  if (!FillV4L2Format(&video_fmt_, width, height, *best)) {
    SetErrorState("Failed filling in V4L2 Format");
    return;
  }

  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt_)) < 0) {
    SetErrorState("Failed to set video capture format");
    return;
  }
  // The driver may have adjusted the requested format; re-read the fourcc
  // actually in effect after VIDIOC_S_FMT.
  const VideoPixelFormat pixel_format =
      V4l2FourCcToChromiumPixelFormat(video_fmt_.fmt.pix.pixelformat);
  if (pixel_format == PIXEL_FORMAT_UNKNOWN) {
    SetErrorState("Unsupported pixel format");
    return;
  }

  // Set capture framerate in the form of capture interval.
  v4l2_streamparm streamparm = {};
  streamparm.type = capture_type_;
  // The following line checks that the driver knows about framerate get/set.
  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) {
    // Now check if the device is able to accept a capture framerate set.
    if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
      // |frame_rate| is float, approximate by a fraction.
      streamparm.parm.capture.timeperframe.numerator =
          media::kFrameRatePrecision;
      // A zero |frame_rate| requests the typical default rate instead.
      streamparm.parm.capture.timeperframe.denominator =
          (frame_rate) ? (frame_rate * media::kFrameRatePrecision)
                       : (kTypicalFramerate * media::kFrameRatePrecision);

      if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_PARM, &streamparm)) <
          0) {
        SetErrorState("Failed to set camera framerate");
        return;
      }
      DVLOG(2) << "Actual camera driverframerate: "
               << streamparm.parm.capture.timeperframe.denominator << "/"
               << streamparm.parm.capture.timeperframe.numerator;
    }
  }
  // TODO(mcasas): what should be done if the camera driver does not allow
  // framerate configuration, or the actual one is different from the desired?

  // Set anti-banding/anti-flicker to 50/60Hz. May fail due to not supported
  // operation (|errno| == EINVAL in this case) or plain failure.
  if ((power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_50HZ) ||
      (power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_60HZ) ||
      (power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_AUTO)) {
    struct v4l2_control control = {};
    control.id = V4L2_CID_POWER_LINE_FREQUENCY;
    control.value = power_line_frequency_;
    const int retval =
        HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &control));
    if (retval != 0)
      DVLOG(1) << "Error setting power line frequency removal";
  }

  // Cache the format actually negotiated with the driver.
  capture_format_.frame_size.SetSize(video_fmt_.fmt.pix.width,
                                     video_fmt_.fmt.pix.height);
  capture_format_.frame_rate = frame_rate;
  capture_format_.pixel_format = pixel_format;

  // Ask for kNumVideoBuffers MMAP buffers; the driver may grant a different
  // amount, reported back in |r_buffer.count|.
  v4l2_requestbuffers r_buffer = {};
  r_buffer.type = capture_type_;
  r_buffer.memory = V4L2_MEMORY_MMAP;
  r_buffer.count = kNumVideoBuffers;
  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) {
    SetErrorState("Error requesting MMAP buffers from V4L2");
    return;
  }
  // mmap() every granted buffer and enqueue it into the driver.
  for (unsigned int i = 0; i < r_buffer.count; ++i) {
    if (!MapAndQueueBuffer(i)) {
      SetErrorState("Allocate buffer failed");
      return;
    }
  }

  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type_))
      < 0) {
    SetErrorState("VIDIOC_STREAMON failed");
    return;
  }

  is_capturing_ = true;
  // Post task to start fetching frames from v4l2.
  v4l2_task_runner_->PostTask(
      FROM_HERE, base::Bind(&V4L2CaptureDelegate::DoCapture, this));
}
300 | |
// Stops streaming and releases all capture resources. Safe to call only on
// the V4L2 task runner.
void V4L2CaptureDelegate::StopAndDeAllocate() {
  DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
  // The order is important: stop streaming, clear |buffer_pool_|,
  // thus munmap()ing the v4l2_buffers, and then return them to the OS.
  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &capture_type_))
      < 0) {
    SetErrorState("VIDIOC_STREAMOFF failed");
    return;
  }

  // Destroying the trackers munmap()s every plane (see ~BufferTracker()).
  buffer_tracker_pool_.clear();

  // VIDIOC_REQBUFS with count = 0 frees the driver-side buffers.
  v4l2_requestbuffers r_buffer = {};
  r_buffer.type = capture_type_;
  r_buffer.memory = V4L2_MEMORY_MMAP;
  r_buffer.count = 0;
  if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0)
    SetErrorState("Failed to VIDIOC_REQBUFS with count = 0");

  // At this point we can close the device.
  // This is also needed for correctly changing settings later via VIDIOC_S_FMT.
  device_fd_.reset();
  is_capturing_ = false;
  client_.reset();
}
326 | |
327 void V4L2CaptureDelegate::SetRotation(int rotation) { | |
328 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | |
329 DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0); | |
330 rotation_ = rotation; | |
331 } | |
332 | |
333 bool V4L2CaptureDelegate::MapAndQueueBuffer(int index) { | |
334 v4l2_buffer buffer; | |
335 FillV4L2Buffer(&buffer, index); | |
336 | |
337 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) < 0) { | |
338 DLOG(ERROR) << "Error querying status of a MMAP V4L2 buffer"; | |
339 return false; | |
340 } | |
341 | |
342 const scoped_refptr<BufferTracker>& buffer_tracker = CreateBufferTracker(); | |
343 if (!buffer_tracker->Init(device_fd_.get(), buffer)) { | |
344 DLOG(ERROR) << "Error creating BufferTracker"; | |
345 return false; | |
346 } | |
347 buffer_tracker_pool_.push_back(buffer_tracker); | |
348 | |
349 // Enqueue the buffer in the drivers incoming queue. | |
350 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { | |
351 DLOG(ERROR) << "Error enqueuing a V4L2 buffer back into the driver"; | |
352 return false; | |
353 } | |
354 return true; | |
355 } | |
356 | |
357 void V4L2CaptureDelegate::FillV4L2Buffer(v4l2_buffer* buffer, | |
358 int i) const { | |
359 memset(buffer, 0, sizeof(*buffer)); | |
360 buffer->memory = V4L2_MEMORY_MMAP; | |
361 buffer->index = i; | |
362 FinishFillingV4L2Buffer(buffer); | |
363 } | |
364 | |
// One iteration of the capture loop: poll()s the device for a filled buffer,
// dequeues it, hands it to the client via SendBuffer(), re-queues it, and
// re-posts itself. Returns silently once |is_capturing_| is cleared.
void V4L2CaptureDelegate::DoCapture() {
  DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
  if (!is_capturing_)
    return;

  pollfd device_pfd = {};
  device_pfd.fd = device_fd_.get();
  device_pfd.events = POLLIN;
  const int result = HANDLE_EINTR(poll(&device_pfd, 1, kCaptureTimeoutMs));
  if (result < 0) {
    SetErrorState("Poll failed");
    return;
  }
  // Check if poll() timed out; track the amount of times it did in a row and
  // throw an error if it times out too many times.
  if (result == 0) {
    timeout_count_++;
    if (timeout_count_ >= kContinuousTimeoutLimit) {
      SetErrorState("Multiple continuous timeouts while read-polling.");
      timeout_count_ = 0;
      return;
    }
  } else {
    // Any successful poll resets the consecutive-timeout counter.
    timeout_count_ = 0;
  }

  // Deenqueue, send and reenqueue a buffer if the driver has filled one in.
  if (device_pfd.revents & POLLIN) {
    v4l2_buffer buffer;
    FillV4L2Buffer(&buffer, 0);

    if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) < 0) {
      SetErrorState("Failed to dequeue capture buffer");
      return;
    }

    // |buffer.index| identifies which mmap()ed tracker holds this frame.
    SendBuffer(buffer_tracker_pool_[buffer.index], video_fmt_);

    if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) {
      SetErrorState("Failed to enqueue capture buffer");
      return;
    }
  }

  // Schedule the next poll regardless of whether a frame arrived this time.
  v4l2_task_runner_->PostTask(
      FROM_HERE, base::Bind(&V4L2CaptureDelegate::DoCapture, this));
}
412 | |
// Stops the capture loop and reports |reason| to the client. Clearing
// |is_capturing_| first guarantees a queued DoCapture() task exits early.
void V4L2CaptureDelegate::SetErrorState(const std::string& reason) {
  DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
  is_capturing_ = false;
  client_->OnError(reason);
}
418 | |
419 } // namespace media | |
OLD | NEW |