OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/video/capture/linux/video_capture_device_linux.h" | 5 #include "media/video/capture/linux/v4l2_capture_delegate.h" |
6 | 6 |
7 #include <errno.h> | |
8 #include <fcntl.h> | |
9 #include <poll.h> | 7 #include <poll.h> |
10 #if defined(OS_OPENBSD) | 8 #include <sys/fcntl.h> |
11 #include <sys/videoio.h> | |
12 #else | |
13 #include <linux/videodev2.h> | |
14 #endif | |
15 #include <sys/ioctl.h> | 9 #include <sys/ioctl.h> |
16 #include <sys/mman.h> | 10 #include <sys/mman.h> |
17 | 11 |
18 #include <list> | |
19 #include <string> | |
20 | |
21 #include "base/bind.h" | 12 #include "base/bind.h" |
22 #include "base/files/file_enumerator.h" | 13 #include "base/files/file_enumerator.h" |
23 #include "base/files/scoped_file.h" | |
24 #include "base/posix/eintr_wrapper.h" | 14 #include "base/posix/eintr_wrapper.h" |
25 #include "base/strings/stringprintf.h" | 15 #include "base/strings/stringprintf.h" |
| 16 #include "media/base/bind_to_current_loop.h" |
| 17 #include "media/video/capture/linux/v4l2_capture_delegate_multi_plane.h" |
| 18 #include "media/video/capture/linux/v4l2_capture_delegate_single_plane.h" |
| 19 #include "media/video/capture/linux/video_capture_device_linux.h" |
26 | 20 |
27 namespace media { | 21 namespace media { |
28 | 22 |
29 #define GET_V4L2_FOURCC_CHAR(a, index) ((char)( ((a) >> (8 * index)) & 0xff)) | |
30 | |
31 // Desired number of video buffers to allocate. The actual number of buffers | 23 // Desired number of video buffers to allocate. The actual number of buffers |
32 // allocated by the v4l2 driver can be higher or lower than this number. | 24 // allocated by the v4l2 driver can be higher or lower than this number. |
33 // kNumVideoBuffers should not be too small, or Chrome may not return enough | 25 // kNumVideoBuffers should not be too small, or Chrome may not return enough |
34 // buffers back to the driver in time. | 26 // buffers back to the driver in time. |
35 const uint32 kNumVideoBuffers = 4; | 27 const uint32 kNumVideoBuffers = 4; |
36 // Timeout in milliseconds v4l2_thread_ blocks waiting for a frame from the hw. | 28 // Timeout in milliseconds v4l2_thread_ blocks waiting for a frame from the hw. |
37 enum { kCaptureTimeoutMs = 200 }; | 29 const int kCaptureTimeoutMs = 200; |
38 // The number of continuous timeouts tolerated before being treated as an error. | 30 // The number of continuous timeouts tolerated before being treated as an error. |
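| // (i.e. up to kContinuousTimeoutLimit * kCaptureTimeoutMs = 2 seconds without a new frame). |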
39 enum { kContinuousTimeoutLimit = 10 }; | 31 const int kContinuousTimeoutLimit = 10; |
40 // MJPEG is preferred if the width or height is larger than this. | 32 // MJPEG is preferred if the requested width or height is larger than this. |
41 enum { kMjpegWidth = 640 }; | 33 const int kMjpegWidth = 640; |
42 enum { kMjpegHeight = 480 }; | 34 const int kMjpegHeight = 480; |
43 // Typical framerate, in fps | 35 // Typical framerate, in fps |
44 enum { kTypicalFramerate = 30 }; | 36 const int kTypicalFramerate = 30; |
45 | 37 |
46 class VideoCaptureDeviceLinux::V4L2CaptureDelegate | 38 // V4L2 color formats supported by V4L2CaptureDelegate derived classes. |
47 : public base::RefCountedThreadSafe<V4L2CaptureDelegate>{ | 39 // This list is ordered by precedence of use -- but see caveats for MJPEG. |
48 public: | 40 static struct { |
49 V4L2CaptureDelegate( | 41 uint32_t fourcc; |
50 const Name& device_name, | 42 VideoPixelFormat pixel_format; |
51 const scoped_refptr<base::SingleThreadTaskRunner> v4l2_task_runner, | 43 size_t num_planes; |
52 int power_line_frequency); | 44 } const kSupportedFormatsAndPlanarity[] = { |
53 | 45 {V4L2_PIX_FMT_YUV420, PIXEL_FORMAT_I420, 1}, |
54 void AllocateAndStart(int width, | 46 {V4L2_PIX_FMT_YUYV, PIXEL_FORMAT_YUY2, 1}, |
55 int height, | 47 {V4L2_PIX_FMT_UYVY, PIXEL_FORMAT_UYVY, 1}, |
56 float frame_rate, | 48 #if !defined(OS_OPENBSD) |
57 scoped_ptr<Client> client); | 49 // TODO(mcasas): add V4L2_PIX_FMT_YVU420M when available in bots. |
58 void StopAndDeAllocate(); | 50 {V4L2_PIX_FMT_YUV420M, PIXEL_FORMAT_I420, 3}, |
59 void SetRotation(int rotation); | 51 #endif |
60 bool DeAllocateVideoBuffers(); | 52 // MJPEG is usually sitting fairly low since we don't want to have to decode. |
61 | 53 // However, it is needed for large resolutions due to USB bandwidth limitations, |
62 private: | 54 // so GetListOfUsableFourCcs() can duplicate it on top, see that method. |
63 // Buffers used to receive captured frames from v4l2. | 55 {V4L2_PIX_FMT_MJPEG, PIXEL_FORMAT_MJPEG, 1}, |
64 struct Buffer { | 56 // JPEG works as MJPEG on some gspca webcams from field reports, see |
65 Buffer() : start(0), length(0) {} | 57 // https://code.google.com/p/webrtc/issues/detail?id=529, put it as the least |
66 void* start; | 58 // preferred format. |
67 size_t length; | 59 {V4L2_PIX_FMT_JPEG, PIXEL_FORMAT_MJPEG, 1}, |
68 }; | |
69 | |
70 friend class base::RefCountedThreadSafe<V4L2CaptureDelegate>; | |
71 ~V4L2CaptureDelegate(); | |
72 | |
73 void DoCapture(); | |
74 bool AllocateVideoBuffers(); | |
75 void SetErrorState(const std::string& reason); | |
76 | |
77 const scoped_refptr<base::SingleThreadTaskRunner> v4l2_task_runner_; | |
78 | |
79 bool is_capturing_; | |
80 scoped_ptr<VideoCaptureDevice::Client> client_; | |
81 const Name device_name_; | |
82 base::ScopedFD device_fd_; // File descriptor for the opened camera device. | |
83 Buffer* buffer_pool_; | |
84 int buffer_pool_size_; // Number of allocated buffers. | |
85 int timeout_count_; | |
86 VideoCaptureFormat capture_format_; | |
87 const int power_line_frequency_; | |
88 | |
89 // Clockwise rotation in degrees. This value should be 0, 90, 180, or 270. | |
90 int rotation_; | |
91 | |
92 DISALLOW_IMPLICIT_CONSTRUCTORS(V4L2CaptureDelegate); | |
93 }; | 60 }; |
94 | 61 |
95 // V4L2 color formats VideoCaptureDeviceLinux support. | 62 // static |
96 static const int32 kV4l2RawFmts[] = { | 63 scoped_refptr<V4L2CaptureDelegate> |
97 V4L2_PIX_FMT_YUV420, | 64 V4L2CaptureDelegate::CreateV4L2CaptureDelegate( |
98 V4L2_PIX_FMT_YUYV, | 65 const VideoCaptureDevice::Name& device_name, |
99 V4L2_PIX_FMT_UYVY | 66 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, |
100 }; | 67 int power_line_frequency) { |
101 | 68 switch (device_name.capture_api_type()) { |
102 // USB VID and PID are both 4 bytes long. | 69 case VideoCaptureDevice::Name::V4L2_SINGLE_PLANE: |
103 static const size_t kVidPidSize = 4; | 70 return make_scoped_refptr(new V4L2CaptureDelegateSinglePlane( |
104 | 71 device_name, v4l2_task_runner, power_line_frequency)); |
105 // /sys/class/video4linux/video{N}/device is a symlink to the corresponding | 72 case VideoCaptureDevice::Name::V4L2_MULTI_PLANE: |
106 // USB device info directory. | 73 #if !defined(OS_OPENBSD) |
107 static const char kVidPathTemplate[] = | 74 return make_scoped_refptr(new V4L2CaptureDelegateMultiPlane( |
108 "/sys/class/video4linux/%s/device/../idVendor"; | 75 device_name, v4l2_task_runner, power_line_frequency)); |
109 static const char kPidPathTemplate[] = | 76 default: |
110 "/sys/class/video4linux/%s/device/../idProduct"; | 77 #endif |
111 | 78 NOTIMPLEMENTED() << "Unknown V4L2 capture API type"; |
112 static bool ReadIdFile(const std::string path, std::string* id) { | 79 return scoped_refptr<V4L2CaptureDelegate>(); |
113 char id_buf[kVidPidSize]; | 80 } |
114 FILE* file = fopen(path.c_str(), "rb"); | |
115 if (!file) | |
116 return false; | |
117 const bool success = fread(id_buf, kVidPidSize, 1, file) == 1; | |
118 fclose(file); | |
119 if (!success) | |
120 return false; | |
121 id->append(id_buf, kVidPidSize); | |
122 return true; | |
123 } | 81 } |
124 | 82 |
125 // This function translates Video4Linux pixel formats to Chromium pixel formats, | 83 // static |
126 // should only support those listed in GetListOfUsableFourCCs. | 84 size_t V4L2CaptureDelegate::GetNumPlanesForFourCc(uint32_t fourcc) { |
| 85 for (const auto& fourcc_and_pixel_format : kSupportedFormatsAndPlanarity) { |
| 86 if (fourcc_and_pixel_format.fourcc == fourcc) |
| 87 return fourcc_and_pixel_format.num_planes; |
| 88 } |
| 89 DVLOG(1) << "Unknown fourcc " << FourccToString(fourcc); |
| 90 return 0; |
| 91 } |
| 92 |
127 // static | 93 // static |
128 VideoPixelFormat VideoCaptureDeviceLinux::V4l2FourCcToChromiumPixelFormat( | 94 VideoPixelFormat V4L2CaptureDelegate::V4l2FourCcToChromiumPixelFormat( |
129 uint32 v4l2_fourcc) { | 95 uint32_t v4l2_fourcc) { |
130 const struct { | 96 for (const auto& fourcc_and_pixel_format : kSupportedFormatsAndPlanarity) { |
131 uint32 fourcc; | |
132 VideoPixelFormat pixel_format; | |
133 } kFourCcAndChromiumPixelFormat[] = { | |
134 {V4L2_PIX_FMT_YUV420, PIXEL_FORMAT_I420}, | |
135 {V4L2_PIX_FMT_YUYV, PIXEL_FORMAT_YUY2}, | |
136 {V4L2_PIX_FMT_UYVY, PIXEL_FORMAT_UYVY}, | |
137 {V4L2_PIX_FMT_MJPEG, PIXEL_FORMAT_MJPEG}, | |
138 {V4L2_PIX_FMT_JPEG, PIXEL_FORMAT_MJPEG}, | |
139 }; | |
140 for (const auto& fourcc_and_pixel_format : kFourCcAndChromiumPixelFormat) { | |
141 if (fourcc_and_pixel_format.fourcc == v4l2_fourcc) | 97 if (fourcc_and_pixel_format.fourcc == v4l2_fourcc) |
142 return fourcc_and_pixel_format.pixel_format; | 98 return fourcc_and_pixel_format.pixel_format; |
143 } | 99 } |
144 DVLOG(1) << "Unsupported pixel format: " | 100 // Not finding a pixel format is OK during device capabilities enumeration. |
145 << GET_V4L2_FOURCC_CHAR(v4l2_fourcc, 0) | 101 // Let the caller decide if PIXEL_FORMAT_UNKNOWN is an error or not. |
146 << GET_V4L2_FOURCC_CHAR(v4l2_fourcc, 1) | 102 DVLOG(1) << "Unsupported pixel format: " << FourccToString(v4l2_fourcc); |
147 << GET_V4L2_FOURCC_CHAR(v4l2_fourcc, 2) | |
148 << GET_V4L2_FOURCC_CHAR(v4l2_fourcc, 3); | |
149 return PIXEL_FORMAT_UNKNOWN; | 103 return PIXEL_FORMAT_UNKNOWN; |
150 } | 104 } |
151 | 105 |
152 // static | 106 // static |
153 std::list<int> VideoCaptureDeviceLinux::GetListOfUsableFourCCs( | 107 std::list<uint32_t> V4L2CaptureDelegate::GetListOfUsableFourCcs( |
154 bool favour_mjpeg) { | 108 bool prefer_mjpeg) { |
155 std::list<int> fourccs; | 109 std::list<uint32_t> supported_formats; |
156 for (size_t i = 0; i < arraysize(kV4l2RawFmts); ++i) | 110 for (const auto& format : kSupportedFormatsAndPlanarity) |
157 fourccs.push_back(kV4l2RawFmts[i]); | 111 supported_formats.push_back(format.fourcc); |
158 if (favour_mjpeg) | |
159 fourccs.push_front(V4L2_PIX_FMT_MJPEG); | |
160 else | |
161 fourccs.push_back(V4L2_PIX_FMT_MJPEG); | |
162 | 112 |
163 // JPEG works as MJPEG on some gspca webcams from field reports. | 113 // Duplicate MJPEG on top of the list depending on |prefer_mjpeg|. |
164 // Put it as the least preferred format. | 114 if (prefer_mjpeg) |
165 fourccs.push_back(V4L2_PIX_FMT_JPEG); | 115 supported_formats.push_front(V4L2_PIX_FMT_MJPEG); |
166 return fourccs; | 116 |
| 117 return supported_formats; |
167 } | 118 } |
168 | 119 |
169 const std::string VideoCaptureDevice::Name::GetModel() const { | 120 // static |
170 // |unique_id| is of the form "/dev/video2". |file_name| is "video2". | 121 std::string V4L2CaptureDelegate::FourccToString(uint32_t fourcc) { |
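| // e.g. V4L2_PIX_FMT_YUYV, defined as v4l2_fourcc('Y', 'U', 'Y', 'V'), prints as "YUYV". |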
171 const std::string dev_dir = "/dev/"; | 122 return base::StringPrintf("%c%c%c%c", fourcc & 0xFF, (fourcc >> 8) & 0xFF, |
172 DCHECK_EQ(0, unique_id_.compare(0, dev_dir.length(), dev_dir)); | 123 (fourcc >> 16) & 0xFF, (fourcc >> 24) & 0xFF); |
173 const std::string file_name = | |
174 unique_id_.substr(dev_dir.length(), unique_id_.length()); | |
175 | |
176 const std::string vidPath = | |
177 base::StringPrintf(kVidPathTemplate, file_name.c_str()); | |
178 const std::string pidPath = | |
179 base::StringPrintf(kPidPathTemplate, file_name.c_str()); | |
180 | |
181 std::string usb_id; | |
182 if (!ReadIdFile(vidPath, &usb_id)) | |
183 return ""; | |
184 usb_id.append(":"); | |
185 if (!ReadIdFile(pidPath, &usb_id)) | |
186 return ""; | |
187 | |
188 return usb_id; | |
189 } | 124 } |
190 | 125 |
191 VideoCaptureDeviceLinux::VideoCaptureDeviceLinux(const Name& device_name) | 126 V4L2CaptureDelegate::BufferTracker::BufferTracker() { |
192 : v4l2_thread_("V4L2CaptureThread"), | |
193 device_name_(device_name) { | |
194 } | 127 } |
195 | 128 |
196 VideoCaptureDeviceLinux::~VideoCaptureDeviceLinux() { | 129 V4L2CaptureDelegate::BufferTracker::~BufferTracker() { |
197 // Check if the thread is running. | 130 for (const auto& plane : planes_) { |
198 // This means that the device has not been StopAndDeAllocate()d properly. | 131 if (plane.start == nullptr) |
199 DCHECK(!v4l2_thread_.IsRunning()); | 132 continue; |
200 v4l2_thread_.Stop(); | 133 const int result = munmap(plane.start, plane.length); |
201 } | 134 PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2 buffer"; |
202 | |
203 void VideoCaptureDeviceLinux::AllocateAndStart( | |
204 const VideoCaptureParams& params, | |
205 scoped_ptr<VideoCaptureDevice::Client> client) { | |
206 DCHECK(!capture_impl_); | |
207 if (v4l2_thread_.IsRunning()) | |
208 return; // Wrong state. | |
209 v4l2_thread_.Start(); | |
210 capture_impl_ = new V4L2CaptureDelegate(device_name_, | |
211 v4l2_thread_.message_loop_proxy(), | |
212 GetPowerLineFrequencyForLocation()); | |
213 v4l2_thread_.message_loop()->PostTask( | |
214 FROM_HERE, | |
215 base::Bind( | |
216 &VideoCaptureDeviceLinux::V4L2CaptureDelegate::AllocateAndStart, | |
217 capture_impl_, | |
218 params.requested_format.frame_size.width(), | |
219 params.requested_format.frame_size.height(), | |
220 params.requested_format.frame_rate, | |
221 base::Passed(&client))); | |
222 } | |
223 | |
224 void VideoCaptureDeviceLinux::StopAndDeAllocate() { | |
225 if (!v4l2_thread_.IsRunning()) | |
226 return; // Wrong state. | |
227 v4l2_thread_.message_loop()->PostTask( | |
228 FROM_HERE, | |
229 base::Bind( | |
230 &VideoCaptureDeviceLinux::V4L2CaptureDelegate::StopAndDeAllocate, | |
231 capture_impl_)); | |
232 v4l2_thread_.Stop(); | |
233 // TODO(mcasas): VCDLinux called DeAllocateVideoBuffers() a second time after | |
234 // stopping |v4l2_thread_| to make sure buffers were completely deallocated. | |
235 // Investigate if that's needed, otherwise remove the following line and make | |
236 // V4L2CaptureDelegate::DeAllocateVideoBuffers() private. | |
237 capture_impl_->DeAllocateVideoBuffers(); | |
238 capture_impl_ = NULL; | |
239 } | |
240 | |
241 void VideoCaptureDeviceLinux::SetRotation(int rotation) { | |
242 if (v4l2_thread_.IsRunning()) { | |
243 v4l2_thread_.message_loop()->PostTask( | |
244 FROM_HERE, | |
245 base::Bind( | |
246 &VideoCaptureDeviceLinux::V4L2CaptureDelegate::SetRotation, | |
247 capture_impl_, | |
248 rotation)); | |
249 } | 135 } |
250 } | 136 } |
251 | 137 |
252 VideoCaptureDeviceLinux::V4L2CaptureDelegate::V4L2CaptureDelegate( | 138 void V4L2CaptureDelegate::BufferTracker::AddMmapedPlane(uint8_t* const start, |
253 const Name& device_name, | 139 size_t length) { |
254 const scoped_refptr<base::SingleThreadTaskRunner> v4l2_task_runner, | 140 Plane plane; |
255 int power_line_frequency) | 141 plane.start = start; |
256 : v4l2_task_runner_(v4l2_task_runner), | 142 plane.length = length; |
| 143 planes_.push_back(plane); |
| 144 } |
| 145 |
| 146 V4L2CaptureDelegate::V4L2CaptureDelegate( |
| 147 const VideoCaptureDevice::Name& device_name, |
| 148 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, |
| 149 int power_line_frequency) |
| 150 : capture_type_((device_name.capture_api_type() == |
| 151 VideoCaptureDevice::Name::V4L2_SINGLE_PLANE) |
| 152 ? V4L2_BUF_TYPE_VIDEO_CAPTURE |
| 153 : V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE), |
| 154 v4l2_task_runner_(v4l2_task_runner), |
| 155 device_name_(device_name), |
| 156 power_line_frequency_(power_line_frequency), |
257 is_capturing_(false), | 157 is_capturing_(false), |
258 device_name_(device_name), | |
259 buffer_pool_(NULL), | |
260 buffer_pool_size_(0), | |
261 timeout_count_(0), | 158 timeout_count_(0), |
262 power_line_frequency_(power_line_frequency), | |
263 rotation_(0) { | 159 rotation_(0) { |
264 } | 160 } |
265 | 161 |
266 VideoCaptureDeviceLinux::V4L2CaptureDelegate::~V4L2CaptureDelegate() { | 162 V4L2CaptureDelegate::~V4L2CaptureDelegate() { |
267 DCHECK(!client_); | |
268 } | 163 } |
269 | 164 |
270 void VideoCaptureDeviceLinux::V4L2CaptureDelegate::AllocateAndStart( | 165 void V4L2CaptureDelegate::AllocateAndStart( |
271 int width, | 166 int width, |
272 int height, | 167 int height, |
273 float frame_rate, | 168 float frame_rate, |
274 scoped_ptr<Client> client) { | 169 scoped_ptr<VideoCaptureDevice::Client> client) { |
275 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | 170 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
276 DCHECK(client); | 171 DCHECK(client); |
277 client_ = client.Pass(); | 172 client_ = client.Pass(); |
278 | 173 |
279 // Need to open camera with O_RDWR after Linux kernel 3.3. | 174 // Need to open camera with O_RDWR after Linux kernel 3.3. |
280 device_fd_.reset(HANDLE_EINTR(open(device_name_.id().c_str(), O_RDWR))); | 175 device_fd_.reset(HANDLE_EINTR(open(device_name_.id().c_str(), O_RDWR))); |
281 if (!device_fd_.is_valid()) { | 176 if (!device_fd_.is_valid()) { |
282 SetErrorState("Failed to open V4L2 device driver file."); | 177 SetErrorState("Failed to open V4L2 device driver file."); |
283 return; | 178 return; |
284 } | 179 } |
285 | 180 |
286 v4l2_capability cap = {}; | 181 v4l2_capability cap = {}; |
287 if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) && | 182 if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) && |
288 (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE && | 183 ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE || |
289 !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)))) { | 184 cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) && |
| 185 !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT) && |
| 186 !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)))) { |
290 device_fd_.reset(); | 187 device_fd_.reset(); |
291 SetErrorState("This is not a V4L2 video capture device"); | 188 SetErrorState("This is not a V4L2 video capture device"); |
292 return; | 189 return; |
293 } | 190 } |
294 | 191 |
295 // Get supported video formats in preferred order. | 192 // Get supported video formats in preferred order. |
296 // For large resolutions, favour mjpeg over raw formats. | 193 // For large resolutions, favour mjpeg over raw formats. |
297 const std::list<int>& desired_v4l2_formats = | 194 const std::list<uint32_t>& desired_v4l2_formats = |
298 GetListOfUsableFourCCs(width > kMjpegWidth || height > kMjpegHeight); | 195 GetListOfUsableFourCcs(width > kMjpegWidth || height > kMjpegHeight); |
299 std::list<int>::const_iterator best = desired_v4l2_formats.end(); | 196 std::list<uint32_t>::const_iterator best = desired_v4l2_formats.end(); |
300 | 197 |
301 v4l2_fmtdesc fmtdesc = {}; | 198 v4l2_fmtdesc fmtdesc = {}; |
302 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 199 fmtdesc.type = capture_type_; |
303 for (; HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0; | 200 for (; HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0; |
304 ++fmtdesc.index) { | 201 ++fmtdesc.index) { |
305 best = std::find(desired_v4l2_formats.begin(), best, fmtdesc.pixelformat); | 202 best = std::find(desired_v4l2_formats.begin(), best, fmtdesc.pixelformat); |
306 } | 203 } |
307 if (best == desired_v4l2_formats.end()) { | 204 if (best == desired_v4l2_formats.end()) { |
308 SetErrorState("Failed to find a supported camera format."); | 205 SetErrorState("Failed to find a supported camera format."); |
309 return; | 206 return; |
310 } | 207 } |
311 | 208 |
312 v4l2_format video_fmt = {}; | 209 DVLOG(1) << "Chosen pixel format is " << FourccToString(*best); |
313 video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 210 |
314 video_fmt.fmt.pix.sizeimage = 0; | 211 video_fmt_.type = capture_type_; |
315 video_fmt.fmt.pix.width = width; | 212 if (!FillV4L2Format(&video_fmt_, width, height, *best)) { |
316 video_fmt.fmt.pix.height = height; | 213 SetErrorState("Failed filling in V4L2 Format"); |
317 video_fmt.fmt.pix.pixelformat = *best; | 214 return; |
318 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt)) < 0) { | 215 } |
| 216 |
| 217 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt_)) < 0) { |
319 SetErrorState("Failed to set video capture format"); | 218 SetErrorState("Failed to set video capture format"); |
320 return; | 219 return; |
321 } | 220 } |
| 221 const VideoPixelFormat pixel_format = |
| 222 V4l2FourCcToChromiumPixelFormat(video_fmt_.fmt.pix.pixelformat); |
| 223 if (pixel_format == PIXEL_FORMAT_UNKNOWN) { |
| 224 SetErrorState("Unsupported pixel format"); |
| 225 return; |
| 226 } |
322 | 227 |
323 // Set capture framerate in the form of capture interval. | 228 // Set capture framerate in the form of capture interval. |
324 v4l2_streamparm streamparm = {}; | 229 v4l2_streamparm streamparm = {}; |
325 streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 230 streamparm.type = capture_type_; |
326 // The following line checks that the driver knows about framerate get/set. | 231 // The following line checks that the driver knows about framerate get/set. |
327 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) { | 232 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) { |
328 // Now check if the device is able to accept a capture framerate set. | 233 // Now check if the device is able to accept a capture framerate set. |
329 if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { | 234 if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { |
330 // |frame_rate| is float, approximate by a fraction. | 235 // |frame_rate| is float, approximate by a fraction. |
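| // e.g. a |frame_rate| of 29.97 gives the interval kFrameRatePrecision / (29.97 * kFrameRatePrecision), i.e. ~1/29.97 seconds per frame. |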
331 streamparm.parm.capture.timeperframe.numerator = | 236 streamparm.parm.capture.timeperframe.numerator = |
332 media::kFrameRatePrecision; | 237 media::kFrameRatePrecision; |
333 streamparm.parm.capture.timeperframe.denominator = (frame_rate) ? | 238 streamparm.parm.capture.timeperframe.denominator = |
334 (frame_rate * media::kFrameRatePrecision) : | 239 (frame_rate) ? (frame_rate * media::kFrameRatePrecision) |
335 (kTypicalFramerate * media::kFrameRatePrecision); | 240 : (kTypicalFramerate * media::kFrameRatePrecision); |
336 | 241 |
337 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_PARM, &streamparm)) < | 242 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_PARM, &streamparm)) < |
338 0) { | 243 0) { |
339 SetErrorState("Failed to set camera framerate"); | 244 SetErrorState("Failed to set camera framerate"); |
340 return; | 245 return; |
341 } | 246 } |
342 DVLOG(2) << "Actual camera driver framerate: " | 247 DVLOG(2) << "Actual camera driver framerate: " |
343 << streamparm.parm.capture.timeperframe.denominator << "/" | 248 << streamparm.parm.capture.timeperframe.denominator << "/" |
344 << streamparm.parm.capture.timeperframe.numerator; | 249 << streamparm.parm.capture.timeperframe.numerator; |
345 } | 250 } |
346 } | 251 } |
347 // TODO(mcasas): what should be done if the camera driver does not allow | 252 // TODO(mcasas): what should be done if the camera driver does not allow |
348 // framerate configuration, or the actual one is different from the desired? | 253 // framerate configuration, or the actual one is different from the desired? |
349 | 254 |
350 // Set anti-banding/anti-flicker to 50/60Hz. May fail due to an unsupported | 255 // Set anti-banding/anti-flicker to 50/60Hz. May fail due to an unsupported |
351 // operation (|errno| == EINVAL in this case) or plain failure. | 256 // operation (|errno| == EINVAL in this case) or plain failure. |
352 if ((power_line_frequency_ == kPowerLine50Hz) || | 257 if ((power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_50HZ) || |
353 (power_line_frequency_ == kPowerLine60Hz)) { | 258 (power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_60HZ) || |
| 259 (power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_AUTO)) { |
354 struct v4l2_control control = {}; | 260 struct v4l2_control control = {}; |
355 control.id = V4L2_CID_POWER_LINE_FREQUENCY; | 261 control.id = V4L2_CID_POWER_LINE_FREQUENCY; |
356 control.value = (power_line_frequency_ == kPowerLine50Hz) | 262 control.value = power_line_frequency_; |
357 ? V4L2_CID_POWER_LINE_FREQUENCY_50HZ | 263 const int retval = |
358 : V4L2_CID_POWER_LINE_FREQUENCY_60HZ; | 264 HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &control)); |
359 HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &control)); | 265 if (retval != 0) |
| 266 DVLOG(1) << "Error setting power line frequency removal"; |
360 } | 267 } |
361 | 268 |
362 capture_format_.frame_size.SetSize(video_fmt.fmt.pix.width, | 269 capture_format_.frame_size.SetSize(video_fmt_.fmt.pix.width, |
363 video_fmt.fmt.pix.height); | 270 video_fmt_.fmt.pix.height); |
364 capture_format_.frame_rate = frame_rate; | 271 capture_format_.frame_rate = frame_rate; |
365 capture_format_.pixel_format = | 272 capture_format_.pixel_format = pixel_format; |
366 V4l2FourCcToChromiumPixelFormat(video_fmt.fmt.pix.pixelformat); | |
367 | 273 |
368 if (!AllocateVideoBuffers()) { | 274 v4l2_requestbuffers r_buffer = {}; |
369 SetErrorState("Allocate buffer failed (Cannot recover from this error)"); | 275 r_buffer.type = capture_type_; |
| 276 r_buffer.memory = V4L2_MEMORY_MMAP; |
| 277 r_buffer.count = kNumVideoBuffers; |
| 278 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) { |
| 279 SetErrorState("Error requesting MMAP buffers from V4L2"); |
370 return; | 280 return; |
371 } | 281 } |
| 282 for (unsigned int i = 0; i < r_buffer.count; ++i) { |
| 283 if (!MapAndQueueBuffer(i)) { |
| 284 SetErrorState("Allocate buffer failed"); |
| 285 return; |
| 286 } |
| 287 } |
372 | 288 |
373 const v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 289 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type_)) |
374 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &type)) < 0) { | 290 < 0) { |
375 SetErrorState("VIDIOC_STREAMON failed"); | 291 SetErrorState("VIDIOC_STREAMON failed"); |
376 return; | 292 return; |
377 } | 293 } |
378 | 294 |
379 is_capturing_ = true; | 295 is_capturing_ = true; |
380 // Post task to start fetching frames from v4l2. | 296 // Post task to start fetching frames from v4l2. |
381 v4l2_task_runner_->PostTask( | 297 v4l2_task_runner_->PostTask( |
382 FROM_HERE, | 298 FROM_HERE, base::Bind(&V4L2CaptureDelegate::DoCapture, this)); |
383 base::Bind(&VideoCaptureDeviceLinux::V4L2CaptureDelegate::DoCapture, | |
384 this)); | |
385 } | 299 } |
386 | 300 |
387 void VideoCaptureDeviceLinux::V4L2CaptureDelegate::StopAndDeAllocate() { | 301 void V4L2CaptureDelegate::StopAndDeAllocate() { |
388 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | 302 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
389 | 303 // The order is important: stop streaming, clear |buffer_tracker_pool_|, |
390 const v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 304 // thus munmap()ing the v4l2_buffers, and then return them to the OS. |
391 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &type)) < 0) { | 305 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &capture_type_)) |
| 306 < 0) { |
392 SetErrorState("VIDIOC_STREAMOFF failed"); | 307 SetErrorState("VIDIOC_STREAMOFF failed"); |
393 return; | 308 return; |
394 } | 309 } |
395 // We don't dare to deallocate the buffers if we can't stop the capture | |
396 // device. | |
397 if (!DeAllocateVideoBuffers()) | |
398 SetErrorState("Failed to reset buffers"); | |
399 | 310 |
400 // We need to close and open the device if we want to change the settings. | 311 buffer_tracker_pool_.clear(); |
401 // Otherwise VIDIOC_S_FMT will return error. Sad but true. | 312 |
| 313 v4l2_requestbuffers r_buffer = {}; |
| 314 r_buffer.type = capture_type_; |
| 315 r_buffer.memory = V4L2_MEMORY_MMAP; |
| 316 r_buffer.count = 0; |
| 317 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) |
| 318 SetErrorState("Failed to VIDIOC_REQBUFS with count = 0"); |
| 319 |
| 320 // At this point we can close the device. |
| 321 // This is also needed for correctly changing settings later via VIDIOC_S_FMT. |
402 device_fd_.reset(); | 322 device_fd_.reset(); |
403 is_capturing_ = false; | 323 is_capturing_ = false; |
404 client_.reset(); | 324 client_.reset(); |
405 } | 325 } |
406 | 326 |
407 void VideoCaptureDeviceLinux::V4L2CaptureDelegate::SetRotation(int rotation) { | 327 void V4L2CaptureDelegate::SetRotation(int rotation) { |
408 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | 328 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
409 DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0); | 329 DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0); |
410 rotation_ = rotation; | 330 rotation_ = rotation; |
411 } | 331 } |
412 | 332 |
413 void VideoCaptureDeviceLinux::V4L2CaptureDelegate::DoCapture() { | 333 bool V4L2CaptureDelegate::MapAndQueueBuffer(int index) { |
| 334 v4l2_buffer buffer; |
| 335 FillV4L2Buffer(&buffer, index); |
| 336 |
| 337 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) < 0) { |
| 338 DLOG(ERROR) << "Error querying status of a MMAP V4L2 buffer"; |
| 339 return false; |
| 340 } |
| 341 |
| 342 const scoped_refptr<BufferTracker>& buffer_tracker = CreateBufferTracker(); |
| 343 if (!buffer_tracker->Init(device_fd_.get(), buffer)) { |
| 344 DLOG(ERROR) << "Error creating BufferTracker"; |
| 345 return false; |
| 346 } |
| 347 buffer_tracker_pool_.push_back(buffer_tracker); |
| 348 |
| 349 // Enqueue the buffer in the driver's incoming queue. |
| 350 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { |
| 351 DLOG(ERROR) << "Error enqueuing a V4L2 buffer back into the driver"; |
| 352 return false; |
| 353 } |
| 354 return true; |
| 355 } |
| 356 |
| 357 void V4L2CaptureDelegate::FillV4L2Buffer(v4l2_buffer* buffer, |
| 358 int i) const { |
| 359 memset(buffer, 0, sizeof(*buffer)); |
| 360 buffer->memory = V4L2_MEMORY_MMAP; |
| 361 buffer->index = i; |
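| // The single- or multi-plane subclass completes the fields that depend on the buffer type. |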
| 362 FinishFillingV4L2Buffer(buffer); |
| 363 } |
| 364 |
| 365 void V4L2CaptureDelegate::DoCapture() { |
414 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | 366 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
415 if (!is_capturing_) | 367 if (!is_capturing_) |
416 return; | 368 return; |
417 | 369 |
418 pollfd device_pfd = {}; | 370 pollfd device_pfd = {}; |
419 device_pfd.fd = device_fd_.get(); | 371 device_pfd.fd = device_fd_.get(); |
420 device_pfd.events = POLLIN; | 372 device_pfd.events = POLLIN; |
421 const int result = HANDLE_EINTR(poll(&device_pfd, 1, kCaptureTimeoutMs)); | 373 const int result = HANDLE_EINTR(poll(&device_pfd, 1, kCaptureTimeoutMs)); |
422 if (result < 0) { | 374 if (result < 0) { |
423 SetErrorState("Poll failed"); | 375 SetErrorState("Poll failed"); |
424 return; | 376 return; |
425 } | 377 } |
426 // Check if poll() timed out; track the number of times it did in a row and | 378 // Check if poll() timed out; track the number of times it did in a row and |
427 // throw an error if it times out too many times. | 379 // throw an error if it times out too many times. |
428 if (result == 0) { | 380 if (result == 0) { |
429 timeout_count_++; | 381 timeout_count_++; |
430 if (timeout_count_ >= kContinuousTimeoutLimit) { | 382 if (timeout_count_ >= kContinuousTimeoutLimit) { |
431 SetErrorState("Multiple continuous timeouts while read-polling."); | 383 SetErrorState("Multiple continuous timeouts while read-polling."); |
432 timeout_count_ = 0; | 384 timeout_count_ = 0; |
433 return; | 385 return; |
434 } | 386 } |
435 } else { | 387 } else { |
436 timeout_count_ = 0; | 388 timeout_count_ = 0; |
437 } | 389 } |
438 | 390 |
439 // Check if the driver has filled a buffer. | 391 // Dequeue, send and re-enqueue a buffer if the driver has filled one in. |
440 if (device_pfd.revents & POLLIN) { | 392 if (device_pfd.revents & POLLIN) { |
441 v4l2_buffer buffer = {}; | 393 v4l2_buffer buffer; |
442 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 394 FillV4L2Buffer(&buffer, 0); |
443 buffer.memory = V4L2_MEMORY_MMAP; | 395 |
444 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) < 0) { | 396 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) < 0) { |
445 SetErrorState("Failed to dequeue capture buffer"); | 397 SetErrorState("Failed to dequeue capture buffer"); |
446 return; | 398 return; |
447 } | 399 } |
448 client_->OnIncomingCapturedData( | |
449 static_cast<uint8*>(buffer_pool_[buffer.index].start), | |
450 buffer.bytesused, | |
451 capture_format_, | |
452 rotation_, | |
453 base::TimeTicks::Now()); | |
454 | 400 |
455 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) | 401 SendBuffer(buffer_tracker_pool_[buffer.index], video_fmt_); |
| 402 |
| 403 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { |
456 SetErrorState("Failed to enqueue capture buffer"); | 404 SetErrorState("Failed to enqueue capture buffer"); |
| 405 return; |
| 406 } |
457 } | 407 } |
458 | 408 |
459 v4l2_task_runner_->PostTask( | 409 v4l2_task_runner_->PostTask( |
460 FROM_HERE, | 410 FROM_HERE, base::Bind(&V4L2CaptureDelegate::DoCapture, this)); |
461 base::Bind(&VideoCaptureDeviceLinux::V4L2CaptureDelegate::DoCapture, | |
462 this)); | |
463 } | 411 } |
464 | 412 |
465 bool VideoCaptureDeviceLinux::V4L2CaptureDelegate::AllocateVideoBuffers() { | 413 void V4L2CaptureDelegate::SetErrorState(const std::string& reason) { |
466 v4l2_requestbuffers r_buffer = {}; | |
467 r_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | |
468 r_buffer.memory = V4L2_MEMORY_MMAP; | |
469 r_buffer.count = kNumVideoBuffers; | |
470 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) { | |
471 DLOG(ERROR) << "Error requesting MMAP buffers from V4L2"; | |
472 return false; | |
473 } | |
474 buffer_pool_size_ = r_buffer.count; | |
475 buffer_pool_ = new Buffer[buffer_pool_size_]; | |
476 for (unsigned int i = 0; i < r_buffer.count; ++i) { | |
477 v4l2_buffer buffer = {}; | |
478 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | |
479 buffer.memory = V4L2_MEMORY_MMAP; | |
480 buffer.index = i; | |
481 buffer.length = 1; | |
482 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) < 0) { | |
483 DLOG(ERROR) << "Error querying status of a MMAP V4L2 buffer"; | |
484 return false; | |
485 } | |
486 | |
487 // Some devices require mmap() to be called with both READ and WRITE. | |
488 // See http://crbug.com/178582. | |
489 buffer_pool_[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, | |
490 MAP_SHARED, device_fd_.get(), buffer.m.offset); | |
491 if (buffer_pool_[i].start == MAP_FAILED) { | |
492 DLOG(ERROR) << "Error mmmap()ing a V4L2 buffer into userspace"; | |
493 return false; | |
494 } | |
495 | |
496 buffer_pool_[i].length = buffer.length; | |
497 // Enqueue the buffer in the drivers incoming queue. | |
498 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { | |
499 DLOG(ERROR) | |
500 << "Error enqueuing a V4L2 buffer back to the drivers incoming queue"; | |
501 return false; | |
502 } | |
503 } | |
504 return true; | |
505 } | |
506 | |
507 bool VideoCaptureDeviceLinux::V4L2CaptureDelegate::DeAllocateVideoBuffers() { | |
508 if (!buffer_pool_) | |
509 return true; | |
510 | |
511 for (int i = 0; i < buffer_pool_size_; ++i) | |
512 munmap(buffer_pool_[i].start, buffer_pool_[i].length); | |
513 | |
514 v4l2_requestbuffers r_buffer = {}; | |
515 r_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | |
516 r_buffer.memory = V4L2_MEMORY_MMAP; | |
517 r_buffer.count = 0; | |
518 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) | |
519 return false; | |
520 | |
521 delete [] buffer_pool_; | |
522 buffer_pool_ = NULL; | |
523 buffer_pool_size_ = 0; | |
524 return true; | |
525 } | |
526 | |
527 void VideoCaptureDeviceLinux::V4L2CaptureDelegate::SetErrorState( | |
528 const std::string& reason) { | |
529 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | 414 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
530 is_capturing_ = false; | 415 is_capturing_ = false; |
531 client_->OnError(reason); | 416 client_->OnError(reason); |
532 } | 417 } |
533 | 418 |
534 } // namespace media | 419 } // namespace media |