OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/capture/video/linux/v4l2_capture_delegate.h" | 5 #include "media/capture/video/linux/v4l2_capture_delegate.h" |
6 | 6 |
7 #include <poll.h> | 7 #include <poll.h> |
8 #include <sys/fcntl.h> | 8 #include <sys/fcntl.h> |
9 #include <sys/ioctl.h> | 9 #include <sys/ioctl.h> |
10 #include <sys/mman.h> | 10 #include <sys/mman.h> |
11 #include <utility> | 11 #include <utility> |
12 | 12 |
13 #include "base/bind.h" | 13 #include "base/bind.h" |
14 #include "base/files/file_enumerator.h" | 14 #include "base/files/file_enumerator.h" |
15 #include "base/posix/eintr_wrapper.h" | 15 #include "base/posix/eintr_wrapper.h" |
16 #include "base/strings/stringprintf.h" | 16 #include "base/strings/stringprintf.h" |
17 #include "build/build_config.h" | 17 #include "build/build_config.h" |
18 #include "media/base/bind_to_current_loop.h" | 18 #include "media/base/bind_to_current_loop.h" |
19 #include "media/capture/video/linux/v4l2_capture_delegate_multi_plane.h" | |
20 #include "media/capture/video/linux/v4l2_capture_delegate_single_plane.h" | |
21 #include "media/capture/video/linux/video_capture_device_linux.h" | 19 #include "media/capture/video/linux/video_capture_device_linux.h" |
22 | 20 |
23 namespace media { | 21 namespace media { |
24 | 22 |
25 // Desired number of video buffers to allocate. The actual number of allocated | 23 // Desired number of video buffers to allocate. The actual number of allocated |
26 // buffers by v4l2 driver can be higher or lower than this number. | 24 // buffers by v4l2 driver can be higher or lower than this number. |
27 // kNumVideoBuffers should not be too small, or Chrome may not return enough | 25 // kNumVideoBuffers should not be too small, or Chrome may not return enough |
28 // buffers back to driver in time. | 26 // buffers back to driver in time. |
29 const uint32_t kNumVideoBuffers = 4; | 27 const uint32_t kNumVideoBuffers = 4; |
30 // Timeout in milliseconds v4l2_thread_ blocks waiting for a frame from the hw. | 28 // Timeout in milliseconds v4l2_thread_ blocks waiting for a frame from the hw. |
(...skipping 10 matching lines...) | |
41 // This list is ordered by precedence of use -- but see caveats for MJPEG. | 39 // This list is ordered by precedence of use -- but see caveats for MJPEG. |
42 static struct { | 40 static struct { |
43 uint32_t fourcc; | 41 uint32_t fourcc; |
44 VideoPixelFormat pixel_format; | 42 VideoPixelFormat pixel_format; |
45 size_t num_planes; | 43 size_t num_planes; |
46 } const kSupportedFormatsAndPlanarity[] = { | 44 } const kSupportedFormatsAndPlanarity[] = { |
47 {V4L2_PIX_FMT_YUV420, PIXEL_FORMAT_I420, 1}, | 45 {V4L2_PIX_FMT_YUV420, PIXEL_FORMAT_I420, 1}, |
48 {V4L2_PIX_FMT_YUYV, PIXEL_FORMAT_YUY2, 1}, | 46 {V4L2_PIX_FMT_YUYV, PIXEL_FORMAT_YUY2, 1}, |
49 {V4L2_PIX_FMT_UYVY, PIXEL_FORMAT_UYVY, 1}, | 47 {V4L2_PIX_FMT_UYVY, PIXEL_FORMAT_UYVY, 1}, |
50 {V4L2_PIX_FMT_RGB24, PIXEL_FORMAT_RGB24, 1}, | 48 {V4L2_PIX_FMT_RGB24, PIXEL_FORMAT_RGB24, 1}, |
51 #if !defined(OS_OPENBSD) | |
52 // TODO(mcasas): add V4L2_PIX_FMT_YVU420M when available in bots. | |
53 {V4L2_PIX_FMT_YUV420M, PIXEL_FORMAT_I420, 3}, | |
54 #endif | |
55 // MJPEG is usually sitting fairly low since we don't want to have to | 49 // MJPEG is usually sitting fairly low since we don't want to have to |
56 // decode. | 50 // decode. However, is needed for large resolutions due to USB bandwidth |
perkj_chrome
2016/02/12 11:01:40
nit: s/is needed/it is needed/....
mcasas
2016/02/12 21:32:43
Done.
57 // However, is needed for large resolutions due to USB bandwidth | 51 // limitations, so GetListOfUsableFourCcs() can duplicate it on top, see |
58 // limitations, | 52 // that method. |
59 // so GetListOfUsableFourCcs() can duplicate it on top, see that method. | |
60 {V4L2_PIX_FMT_MJPEG, PIXEL_FORMAT_MJPEG, 1}, | 53 {V4L2_PIX_FMT_MJPEG, PIXEL_FORMAT_MJPEG, 1}, |
61 // JPEG works as MJPEG on some gspca webcams from field reports, see | 54 // JPEG works as MJPEG on some gspca webcams from field reports, see |
62 // https://code.google.com/p/webrtc/issues/detail?id=529, put it as the | 55 // https://code.google.com/p/webrtc/issues/detail?id=529, put it as the |
63 // least | 56 // least preferred format. |
64 // preferred format. | |
65 {V4L2_PIX_FMT_JPEG, PIXEL_FORMAT_MJPEG, 1}, | 57 {V4L2_PIX_FMT_JPEG, PIXEL_FORMAT_MJPEG, 1}, |
66 }; | 58 }; |
67 | 59 |
68 // static | 60 // Fill in |format| with the given parameters. |
69 scoped_refptr<V4L2CaptureDelegate> | 61 static void FillV4L2Format(v4l2_format* format, |
70 V4L2CaptureDelegate::CreateV4L2CaptureDelegate( | 62 uint32_t width, |
71 const VideoCaptureDevice::Name& device_name, | 63 uint32_t height, |
72 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, | 64 uint32_t pixelformat_fourcc) { |
73 int power_line_frequency) { | 65 memset(format, 0, sizeof(*format)); |
74 switch (device_name.capture_api_type()) { | 66 format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
75 case VideoCaptureDevice::Name::V4L2_SINGLE_PLANE: | 67 format->fmt.pix.width = width; |
76 return make_scoped_refptr(new V4L2CaptureDelegateSinglePlane( | 68 format->fmt.pix.height = height; |
77 device_name, v4l2_task_runner, power_line_frequency)); | 69 format->fmt.pix.pixelformat = pixelformat_fourcc; |
78 case VideoCaptureDevice::Name::V4L2_MULTI_PLANE: | 70 } |
79 #if !defined(OS_OPENBSD) | 71 |
80 return make_scoped_refptr(new V4L2CaptureDelegateMultiPlane( | 72 // Fills all parts of |buffer|. |
81 device_name, v4l2_task_runner, power_line_frequency)); | 73 static void FillV4L2Buffer(v4l2_buffer* buffer, int index) { |
82 default: | 74 memset(buffer, 0, sizeof(*buffer)); |
83 #endif | 75 buffer->memory = V4L2_MEMORY_MMAP; |
84 NOTIMPLEMENTED() << "Unknown V4L2 capture API type"; | 76 buffer->index = index; |
85 return scoped_refptr<V4L2CaptureDelegate>(); | 77 buffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
78 } | |
79 | |
80 static void FillV4L2RequestBuffer(v4l2_requestbuffers* request_buffer, | |
81 int count) { | |
82 memset(request_buffer, 0, sizeof(*request_buffer)); | |
83 request_buffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | |
84 request_buffer->memory = V4L2_MEMORY_MMAP; | |
85 request_buffer->count = count; | |
86 } | |
87 | |
88 // Returns the input |fourcc| as a std::string four char representation. | |
89 static std::string FourccToString(uint32_t fourcc) { | |
90 return base::StringPrintf("%c%c%c%c", fourcc & 0xFF, (fourcc >> 8) & 0xFF, | |
91 (fourcc >> 16) & 0xFF, (fourcc >> 24) & 0xFF); | |
92 } | |
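For reference, a V4L2 fourcc packs its four characters least-significant byte first, which is why FourccToString() above peels the bytes off with shifts. A minimal standalone sketch of the same unpacking, using plain printf instead of base::StringPrintf (illustration only, not part of this CL):

    // V4L2_PIX_FMT_YUYV is v4l2_fourcc('Y','U','Y','V') == 0x56595559,
    // so this prints "YUYV".
    #include <linux/videodev2.h>
    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t fourcc = V4L2_PIX_FMT_YUYV;
      std::printf("%c%c%c%c\n", fourcc & 0xFF, (fourcc >> 8) & 0xFF,
                  (fourcc >> 16) & 0xFF, (fourcc >> 24) & 0xFF);
      return 0;
    }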
93 | |
94 // Class keeping track of a SPLANE V4L2 buffer, mmap()ed on construction and | |
95 // munmap()ed on destruction. | |
96 class V4L2CaptureDelegate::BufferTracker | |
97 : public base::RefCounted<BufferTracker> { | |
98 public: | |
99 BufferTracker(); | |
100 // Abstract method to mmap() given |fd| according to |buffer|. | |
101 bool Init(int fd, const v4l2_buffer& buffer); | |
102 | |
103 const uint8_t* start() const { return start_; } | |
104 size_t payload_size() const { return payload_size_; } | |
105 void set_payload_size(size_t payload_size) { | |
106 DCHECK_LE(payload_size, length_); | |
107 payload_size_ = payload_size; | |
86 } | 108 } |
87 } | 109 |
110 private: | |
111 friend class base::RefCounted<BufferTracker>; | |
112 virtual ~BufferTracker(); | |
113 | |
114 uint8_t* start_; | |
115 size_t length_; | |
116 size_t payload_size_; | |
117 }; | |
88 | 118 |
89 // static | 119 // static |
90 size_t V4L2CaptureDelegate::GetNumPlanesForFourCc(uint32_t fourcc) { | 120 size_t V4L2CaptureDelegate::GetNumPlanesForFourCc(uint32_t fourcc) { |
91 for (const auto& fourcc_and_pixel_format : kSupportedFormatsAndPlanarity) { | 121 for (const auto& fourcc_and_pixel_format : kSupportedFormatsAndPlanarity) { |
92 if (fourcc_and_pixel_format.fourcc == fourcc) | 122 if (fourcc_and_pixel_format.fourcc == fourcc) |
93 return fourcc_and_pixel_format.num_planes; | 123 return fourcc_and_pixel_format.num_planes; |
94 } | 124 } |
95 DVLOG(1) << "Unknown fourcc " << FourccToString(fourcc); | 125 DVLOG(1) << "Unknown fourcc " << FourccToString(fourcc); |
96 return 0; | 126 return 0; |
97 } | 127 } |
(...skipping 19 matching lines...) | |
117 for (const auto& format : kSupportedFormatsAndPlanarity) | 147 for (const auto& format : kSupportedFormatsAndPlanarity) |
118 supported_formats.push_back(format.fourcc); | 148 supported_formats.push_back(format.fourcc); |
119 | 149 |
120 // Duplicate MJPEG on top of the list depending on |prefer_mjpeg|. | 150 // Duplicate MJPEG on top of the list depending on |prefer_mjpeg|. |
121 if (prefer_mjpeg) | 151 if (prefer_mjpeg) |
122 supported_formats.push_front(V4L2_PIX_FMT_MJPEG); | 152 supported_formats.push_front(V4L2_PIX_FMT_MJPEG); |
123 | 153 |
124 return supported_formats; | 154 return supported_formats; |
125 } | 155 } |
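To illustrate the resulting ordering, here is a hypothetical caller (illustration only, assuming the method is callable from the caller's context): with |prefer_mjpeg| set, MJPEG is duplicated at the front while the raw formats keep their precedence below it.

    // Illustration only; not part of this CL.
    const std::list<uint32_t> fourccs =
        V4L2CaptureDelegate::GetListOfUsableFourCcs(true /* prefer_mjpeg */);
    // fourccs.front() is V4L2_PIX_FMT_MJPEG, followed by the raw formats in
    // the precedence above (YUV420, YUYV, UYVY, RGB24, ...), with MJPEG listed
    // a second time near the end and JPEG last.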
126 | 156 |
127 // static | |
128 std::string V4L2CaptureDelegate::FourccToString(uint32_t fourcc) { | |
129 return base::StringPrintf("%c%c%c%c", fourcc & 0xFF, (fourcc >> 8) & 0xFF, | |
130 (fourcc >> 16) & 0xFF, (fourcc >> 24) & 0xFF); | |
131 } | |
132 | |
133 V4L2CaptureDelegate::BufferTracker::BufferTracker() { | |
134 } | |
135 | |
136 V4L2CaptureDelegate::BufferTracker::~BufferTracker() { | |
137 for (const auto& plane : planes_) { | |
138 if (plane.start == nullptr) | |
139 continue; | |
140 const int result = munmap(plane.start, plane.length); | |
141 PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2 buffer"; | |
142 } | |
143 } | |
144 | |
145 void V4L2CaptureDelegate::BufferTracker::AddMmapedPlane(uint8_t* const start, | |
146 size_t length) { | |
147 Plane plane; | |
148 plane.start = start; | |
149 plane.length = length; | |
150 plane.payload_size = 0; | |
151 planes_.push_back(plane); | |
152 } | |
153 | |
154 V4L2CaptureDelegate::V4L2CaptureDelegate( | 157 V4L2CaptureDelegate::V4L2CaptureDelegate( |
155 const VideoCaptureDevice::Name& device_name, | 158 const VideoCaptureDevice::Name& device_name, |
156 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, | 159 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, |
157 int power_line_frequency) | 160 int power_line_frequency) |
158 : capture_type_((device_name.capture_api_type() == | 161 : v4l2_task_runner_(v4l2_task_runner), |
159 VideoCaptureDevice::Name::V4L2_SINGLE_PLANE) | |
160 ? V4L2_BUF_TYPE_VIDEO_CAPTURE | |
161 : V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE), | |
162 v4l2_task_runner_(v4l2_task_runner), | |
163 device_name_(device_name), | 162 device_name_(device_name), |
164 power_line_frequency_(power_line_frequency), | 163 power_line_frequency_(power_line_frequency), |
165 is_capturing_(false), | 164 is_capturing_(false), |
166 timeout_count_(0), | 165 timeout_count_(0), |
167 rotation_(0) { | 166 rotation_(0) {} |
168 } | |
169 | |
170 V4L2CaptureDelegate::~V4L2CaptureDelegate() { | |
171 } | |
172 | 167 |
173 void V4L2CaptureDelegate::AllocateAndStart( | 168 void V4L2CaptureDelegate::AllocateAndStart( |
174 int width, | 169 int width, |
175 int height, | 170 int height, |
176 float frame_rate, | 171 float frame_rate, |
177 scoped_ptr<VideoCaptureDevice::Client> client) { | 172 scoped_ptr<VideoCaptureDevice::Client> client) { |
178 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | 173 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
179 DCHECK(client); | 174 DCHECK(client); |
180 client_ = std::move(client); | 175 client_ = std::move(client); |
181 | 176 |
182 // Need to open camera with O_RDWR after Linux kernel 3.3. | 177 // Need to open camera with O_RDWR after Linux kernel 3.3. |
183 device_fd_.reset(HANDLE_EINTR(open(device_name_.id().c_str(), O_RDWR))); | 178 device_fd_.reset(HANDLE_EINTR(open(device_name_.id().c_str(), O_RDWR))); |
184 if (!device_fd_.is_valid()) { | 179 if (!device_fd_.is_valid()) { |
185 SetErrorState(FROM_HERE, "Failed to open V4L2 device driver file."); | 180 SetErrorState(FROM_HERE, "Failed to open V4L2 device driver file."); |
186 return; | 181 return; |
187 } | 182 } |
188 | 183 |
189 v4l2_capability cap = {}; | 184 v4l2_capability cap = {}; |
190 if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) && | 185 if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) && |
191 ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE || | 186 ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) && |
192 cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) && | 187 !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)))) { |
193 !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT) && | |
194 !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)))) { | |
195 device_fd_.reset(); | 188 device_fd_.reset(); |
196 SetErrorState(FROM_HERE, "This is not a V4L2 video capture device"); | 189 SetErrorState(FROM_HERE, "This is not a V4L2 video capture device"); |
197 return; | 190 return; |
198 } | 191 } |
199 | 192 |
200 // Get supported video formats in preferred order. | 193 // Get supported video formats in preferred order. For large resolutions, |
201 // For large resolutions, favour mjpeg over raw formats. | 194 // favour mjpeg over raw formats. |
202 const std::list<uint32_t>& desired_v4l2_formats = | 195 const std::list<uint32_t>& desired_v4l2_formats = |
203 GetListOfUsableFourCcs(width > kMjpegWidth || height > kMjpegHeight); | 196 GetListOfUsableFourCcs(width > kMjpegWidth || height > kMjpegHeight); |
204 std::list<uint32_t>::const_iterator best = desired_v4l2_formats.end(); | 197 std::list<uint32_t>::const_iterator best = desired_v4l2_formats.end(); |
205 | 198 |
206 v4l2_fmtdesc fmtdesc = {}; | 199 v4l2_fmtdesc fmtdesc = {}; |
207 fmtdesc.type = capture_type_; | 200 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
208 for (; HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0; | 201 for (; HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0; |
209 ++fmtdesc.index) { | 202 ++fmtdesc.index) { |
210 best = std::find(desired_v4l2_formats.begin(), best, fmtdesc.pixelformat); | 203 best = std::find(desired_v4l2_formats.begin(), best, fmtdesc.pixelformat); |
211 } | 204 } |
212 if (best == desired_v4l2_formats.end()) { | 205 if (best == desired_v4l2_formats.end()) { |
213 SetErrorState(FROM_HERE, "Failed to find a supported camera format."); | 206 SetErrorState(FROM_HERE, "Failed to find a supported camera format."); |
214 return; | 207 return; |
215 } | 208 } |
216 | 209 |
217 DVLOG(1) << "Chosen pixel format is " << FourccToString(*best); | 210 DVLOG(1) << "Chosen pixel format is " << FourccToString(*best); |
218 | 211 FillV4L2Format(&video_fmt_, width, height, *best); |
219 video_fmt_.type = capture_type_; | |
220 if (!FillV4L2Format(&video_fmt_, width, height, *best)) { | |
221 SetErrorState(FROM_HERE, "Failed filling in V4L2 Format"); | |
222 return; | |
223 } | |
224 | 212 |
225 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt_)) < 0) { | 213 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt_)) < 0) { |
226 SetErrorState(FROM_HERE, "Failed to set video capture format"); | 214 SetErrorState(FROM_HERE, "Failed to set video capture format"); |
227 return; | 215 return; |
228 } | 216 } |
229 const VideoPixelFormat pixel_format = | 217 const VideoPixelFormat pixel_format = |
230 V4l2FourCcToChromiumPixelFormat(video_fmt_.fmt.pix.pixelformat); | 218 V4l2FourCcToChromiumPixelFormat(video_fmt_.fmt.pix.pixelformat); |
231 if (pixel_format == PIXEL_FORMAT_UNKNOWN) { | 219 if (pixel_format == PIXEL_FORMAT_UNKNOWN) { |
232 SetErrorState(FROM_HERE, "Unsupported pixel format"); | 220 SetErrorState(FROM_HERE, "Unsupported pixel format"); |
233 return; | 221 return; |
234 } | 222 } |
235 | 223 |
236 // Set capture framerate in the form of capture interval. | 224 // Set capture framerate in the form of capture interval. |
237 v4l2_streamparm streamparm = {}; | 225 v4l2_streamparm streamparm = {}; |
238 streamparm.type = capture_type_; | 226 streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
239 // The following line checks that the driver knows about framerate get/set. | 227 // The following line checks that the driver knows about framerate get/set. |
240 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) { | 228 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) { |
241 // Now check if the device is able to accept a capture framerate set. | 229 // Now check if the device is able to accept a capture framerate set. |
242 if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { | 230 if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { |
243 // |frame_rate| is float, approximate by a fraction. | 231 // |frame_rate| is float, approximate by a fraction. |
244 streamparm.parm.capture.timeperframe.numerator = | 232 streamparm.parm.capture.timeperframe.numerator = |
245 media::kFrameRatePrecision; | 233 media::kFrameRatePrecision; |
246 streamparm.parm.capture.timeperframe.denominator = | 234 streamparm.parm.capture.timeperframe.denominator = |
247 (frame_rate) ? (frame_rate * media::kFrameRatePrecision) | 235 (frame_rate) ? (frame_rate * media::kFrameRatePrecision) |
248 : (kTypicalFramerate * media::kFrameRatePrecision); | 236 : (kTypicalFramerate * media::kFrameRatePrecision); |
(...skipping 23 matching lines...) | |
272 HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &control)); | 260 HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &control)); |
273 if (retval != 0) | 261 if (retval != 0) |
274 DVLOG(1) << "Error setting power line frequency removal"; | 262 DVLOG(1) << "Error setting power line frequency removal"; |
275 } | 263 } |
276 | 264 |
277 capture_format_.frame_size.SetSize(video_fmt_.fmt.pix.width, | 265 capture_format_.frame_size.SetSize(video_fmt_.fmt.pix.width, |
278 video_fmt_.fmt.pix.height); | 266 video_fmt_.fmt.pix.height); |
279 capture_format_.frame_rate = frame_rate; | 267 capture_format_.frame_rate = frame_rate; |
280 capture_format_.pixel_format = pixel_format; | 268 capture_format_.pixel_format = pixel_format; |
281 | 269 |
282 v4l2_requestbuffers r_buffer = {}; | 270 v4l2_requestbuffers r_buffer; |
283 r_buffer.type = capture_type_; | 271 FillV4L2RequestBuffer(&r_buffer, kNumVideoBuffers); |
284 r_buffer.memory = V4L2_MEMORY_MMAP; | |
285 r_buffer.count = kNumVideoBuffers; | |
286 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) { | 272 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) { |
287 SetErrorState(FROM_HERE, "Error requesting MMAP buffers from V4L2"); | 273 SetErrorState(FROM_HERE, "Error requesting MMAP buffers from V4L2"); |
288 return; | 274 return; |
289 } | 275 } |
290 for (unsigned int i = 0; i < r_buffer.count; ++i) { | 276 for (unsigned int i = 0; i < r_buffer.count; ++i) { |
291 if (!MapAndQueueBuffer(i)) { | 277 if (!MapAndQueueBuffer(i)) { |
292 SetErrorState(FROM_HERE, "Allocate buffer failed"); | 278 SetErrorState(FROM_HERE, "Allocate buffer failed"); |
293 return; | 279 return; |
294 } | 280 } |
295 } | 281 } |
296 | 282 |
297 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type_)) < | 283 v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
284 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type)) < | |
298 0) { | 285 0) { |
299 SetErrorState(FROM_HERE, "VIDIOC_STREAMON failed"); | 286 SetErrorState(FROM_HERE, "VIDIOC_STREAMON failed"); |
300 return; | 287 return; |
301 } | 288 } |
302 | 289 |
303 is_capturing_ = true; | 290 is_capturing_ = true; |
304 // Post task to start fetching frames from v4l2. | 291 // Post task to start fetching frames from v4l2. |
305 v4l2_task_runner_->PostTask( | 292 v4l2_task_runner_->PostTask( |
306 FROM_HERE, base::Bind(&V4L2CaptureDelegate::DoCapture, this)); | 293 FROM_HERE, base::Bind(&V4L2CaptureDelegate::DoCapture, AsWeakPtr())); |
307 } | 294 } |
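As a worked example of the capture-interval fraction set above: kFrameRatePrecision is defined elsewhere in media/base and its value is only assumed to be 100 here for illustration; a requested 30 fps then becomes timeperframe = 100/3000, i.e. 1/30 of a second.

    // Illustration only: kFrameRatePrecision's real value lives in media/base
    // and is just assumed to be 100 here.
    const int kFrameRatePrecision = 100;
    const float frame_rate = 30.0f;
    const int numerator = kFrameRatePrecision;                  // 100
    const int denominator = frame_rate * kFrameRatePrecision;   // 3000
    // timeperframe = numerator / denominator = 100/3000 = 1/30 s, i.e. 30 fps.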
308 | 295 |
309 void V4L2CaptureDelegate::StopAndDeAllocate() { | 296 void V4L2CaptureDelegate::StopAndDeAllocate() { |
310 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | 297 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
311 // The order is important: stop streaming, clear |buffer_pool_|, | 298 // The order is important: stop streaming, clear |buffer_pool_|, |
312 // thus munmap()ing the v4l2_buffers, and then return them to the OS. | 299 // thus munmap()ing the v4l2_buffers, and then return them to the OS. |
313 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &capture_type_)) < | 300 v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
301 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &capture_type)) < | |
314 0) { | 302 0) { |
315 SetErrorState(FROM_HERE, "VIDIOC_STREAMOFF failed"); | 303 SetErrorState(FROM_HERE, "VIDIOC_STREAMOFF failed"); |
316 return; | 304 return; |
317 } | 305 } |
318 | 306 |
319 buffer_tracker_pool_.clear(); | 307 buffer_tracker_pool_.clear(); |
320 | 308 |
321 v4l2_requestbuffers r_buffer = {}; | 309 v4l2_requestbuffers r_buffer; |
322 r_buffer.type = capture_type_; | 310 FillV4L2RequestBuffer(&r_buffer, 0); |
323 r_buffer.memory = V4L2_MEMORY_MMAP; | |
324 r_buffer.count = 0; | |
325 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) | 311 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) |
326 SetErrorState(FROM_HERE, "Failed to VIDIOC_REQBUFS with count = 0"); | 312 SetErrorState(FROM_HERE, "Failed to VIDIOC_REQBUFS with count = 0"); |
327 | 313 |
328 // At this point we can close the device. | 314 // At this point we can close the device. |
329 // This is also needed for correctly changing settings later via VIDIOC_S_FMT. | 315 // This is also needed for correctly changing settings later via VIDIOC_S_FMT. |
330 device_fd_.reset(); | 316 device_fd_.reset(); |
331 is_capturing_ = false; | 317 is_capturing_ = false; |
332 client_.reset(); | 318 client_.reset(); |
333 } | 319 } |
334 | 320 |
335 void V4L2CaptureDelegate::SetRotation(int rotation) { | 321 void V4L2CaptureDelegate::SetRotation(int rotation) { |
336 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | 322 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
337 DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0); | 323 DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0); |
338 rotation_ = rotation; | 324 rotation_ = rotation; |
339 } | 325 } |
340 | 326 |
327 V4L2CaptureDelegate::~V4L2CaptureDelegate() {} | |
328 | |
341 bool V4L2CaptureDelegate::MapAndQueueBuffer(int index) { | 329 bool V4L2CaptureDelegate::MapAndQueueBuffer(int index) { |
342 v4l2_buffer buffer; | 330 v4l2_buffer buffer; |
343 FillV4L2Buffer(&buffer, index); | 331 FillV4L2Buffer(&buffer, index); |
344 | 332 |
345 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) < 0) { | 333 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) < 0) { |
346 DLOG(ERROR) << "Error querying status of a MMAP V4L2 buffer"; | 334 DLOG(ERROR) << "Error querying status of a MMAP V4L2 buffer"; |
347 return false; | 335 return false; |
348 } | 336 } |
349 | 337 |
350 const scoped_refptr<BufferTracker>& buffer_tracker = CreateBufferTracker(); | 338 const scoped_refptr<BufferTracker> buffer_tracker(new BufferTracker()); |
351 if (!buffer_tracker->Init(device_fd_.get(), buffer)) { | 339 if (!buffer_tracker->Init(device_fd_.get(), buffer)) { |
352 DLOG(ERROR) << "Error creating BufferTracker"; | 340 DLOG(ERROR) << "Error creating BufferTracker"; |
353 return false; | 341 return false; |
354 } | 342 } |
355 buffer_tracker_pool_.push_back(buffer_tracker); | 343 buffer_tracker_pool_.push_back(buffer_tracker); |
356 | 344 |
357 // Enqueue the buffer in the drivers incoming queue. | 345 // Enqueue the buffer in the drivers incoming queue. |
358 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { | 346 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { |
359 DLOG(ERROR) << "Error enqueuing a V4L2 buffer back into the driver"; | 347 DLOG(ERROR) << "Error enqueuing a V4L2 buffer back into the driver"; |
360 return false; | 348 return false; |
361 } | 349 } |
362 return true; | 350 return true; |
363 } | 351 } |
364 | 352 |
365 void V4L2CaptureDelegate::FillV4L2Buffer(v4l2_buffer* buffer, int i) const { | |
366 memset(buffer, 0, sizeof(*buffer)); | |
367 buffer->memory = V4L2_MEMORY_MMAP; | |
368 buffer->index = i; | |
369 FinishFillingV4L2Buffer(buffer); | |
370 } | |
371 | |
372 void V4L2CaptureDelegate::DoCapture() { | 353 void V4L2CaptureDelegate::DoCapture() { |
373 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | 354 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
374 if (!is_capturing_) | 355 if (!is_capturing_) |
375 return; | 356 return; |
376 | 357 |
377 pollfd device_pfd = {}; | 358 pollfd device_pfd = {}; |
378 device_pfd.fd = device_fd_.get(); | 359 device_pfd.fd = device_fd_.get(); |
379 device_pfd.events = POLLIN; | 360 device_pfd.events = POLLIN; |
380 const int result = HANDLE_EINTR(poll(&device_pfd, 1, kCaptureTimeoutMs)); | 361 const int result = HANDLE_EINTR(poll(&device_pfd, 1, kCaptureTimeoutMs)); |
381 if (result < 0) { | 362 if (result < 0) { |
(...skipping 17 matching lines...) | |
399 // Deenqueue, send and reenqueue a buffer if the driver has filled one in. | 380 // Deenqueue, send and reenqueue a buffer if the driver has filled one in. |
400 if (device_pfd.revents & POLLIN) { | 381 if (device_pfd.revents & POLLIN) { |
401 v4l2_buffer buffer; | 382 v4l2_buffer buffer; |
402 FillV4L2Buffer(&buffer, 0); | 383 FillV4L2Buffer(&buffer, 0); |
403 | 384 |
404 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) < 0) { | 385 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) < 0) { |
405 SetErrorState(FROM_HERE, "Failed to dequeue capture buffer"); | 386 SetErrorState(FROM_HERE, "Failed to dequeue capture buffer"); |
406 return; | 387 return; |
407 } | 388 } |
408 | 389 |
409 SetPayloadSize(buffer_tracker_pool_[buffer.index], buffer); | 390 buffer_tracker_pool_[buffer.index]->set_payload_size(buffer.bytesused); |
410 SendBuffer(buffer_tracker_pool_[buffer.index], video_fmt_); | 391 const scoped_refptr<BufferTracker>& buffer_tracker = |
392 buffer_tracker_pool_[buffer.index]; | |
393 client_->OnIncomingCapturedData( | |
394 buffer_tracker->start(), buffer_tracker->payload_size(), | |
395 capture_format_, rotation_, base::TimeTicks::Now()); | |
411 | 396 |
412 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { | 397 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { |
413 SetErrorState(FROM_HERE, "Failed to enqueue capture buffer"); | 398 SetErrorState(FROM_HERE, "Failed to enqueue capture buffer"); |
414 return; | 399 return; |
415 } | 400 } |
416 } | 401 } |
417 | 402 |
418 v4l2_task_runner_->PostTask( | 403 v4l2_task_runner_->PostTask( |
419 FROM_HERE, base::Bind(&V4L2CaptureDelegate::DoCapture, this)); | 404 FROM_HERE, base::Bind(&V4L2CaptureDelegate::DoCapture, AsWeakPtr())); |
perkj_chrome
2016/02/12 11:01:40
If I remember the comments correctly, WeakPtrs are no
mcasas
2016/02/12 21:32:43
Added a TODO somewhere and leaving this for
anothe
420 } | 405 } |
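Regarding the review thread above: the NEW code binds DoCapture() through AsWeakPtr() instead of through the refcounted |this|, which presumably means V4L2CaptureDelegate now derives from base::SupportsWeakPtr<> in its header (not shown in this diff). A minimal sketch of that pattern, with hypothetical names:

    // Sketch only; class and member names are hypothetical. A WeakPtr-bound
    // task is silently skipped if the object has been destroyed, and the
    // WeakPtr must be created and dereferenced on the same thread.
    #include "base/bind.h"
    #include "base/location.h"
    #include "base/memory/ref_counted.h"
    #include "base/memory/weak_ptr.h"
    #include "base/single_thread_task_runner.h"

    class Delegate : public base::SupportsWeakPtr<Delegate> {
     public:
      void Start(scoped_refptr<base::SingleThreadTaskRunner> task_runner) {
        task_runner->PostTask(FROM_HERE,
                              base::Bind(&Delegate::DoWork, AsWeakPtr()));
      }

     private:
      void DoWork() {}  // Runs only if the Delegate is still alive.
    };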
421 | 406 |
422 void V4L2CaptureDelegate::SetErrorState( | 407 void V4L2CaptureDelegate::SetErrorState( |
423 const tracked_objects::Location& from_here, | 408 const tracked_objects::Location& from_here, |
424 const std::string& reason) { | 409 const std::string& reason) { |
425 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | 410 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
426 is_capturing_ = false; | 411 is_capturing_ = false; |
427 client_->OnError(from_here, reason); | 412 client_->OnError(from_here, reason); |
428 } | 413 } |
429 | 414 |
415 V4L2CaptureDelegate::BufferTracker::BufferTracker() {} | |
416 | |
417 V4L2CaptureDelegate::BufferTracker::~BufferTracker() { | |
418 if (start_ == nullptr) | |
419 return; | |
420 const int result = munmap(start_, length_); | |
421 PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2 buffer"; | |
422 } | |
423 | |
424 bool V4L2CaptureDelegate::BufferTracker::Init(int fd, | |
425 const v4l2_buffer& buffer) { | |
426 // Some devices require mmap() to be called with both READ and WRITE. | |
427 // See http://crbug.com/178582. | |
428 void* const start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, | |
429 MAP_SHARED, fd, buffer.m.offset); | |
430 if (start == MAP_FAILED) { | |
431 DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace"; | |
432 return false; | |
433 } | |
434 start_ = static_cast<uint8_t*>(start); | |
435 length_ = buffer.length; | |
436 payload_size_ = 0; | |
437 return true; | |
438 } | |
439 | |
430 } // namespace media | 440 } // namespace media |
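For readers not familiar with V4L2, the plain ioctl() sequence that the single-plane delegate above wraps is roughly the following. This is a bare sketch with error handling elided, not Chromium code; device path and dimensions are placeholders.

    #include <fcntl.h>
    #include <poll.h>
    #include <sys/ioctl.h>
    #include <sys/mman.h>
    #include <unistd.h>
    #include <linux/videodev2.h>

    int main() {
      int fd = open("/dev/video0", O_RDWR);

      // Negotiate a pixel format and resolution.
      v4l2_format fmt = {};
      fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      fmt.fmt.pix.width = 640;
      fmt.fmt.pix.height = 480;
      fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
      ioctl(fd, VIDIOC_S_FMT, &fmt);

      // Ask the driver for MMAP buffers; it may grant a different count,
      // which real code must honor (assumed to stay at 4 here).
      v4l2_requestbuffers req = {};
      req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      req.memory = V4L2_MEMORY_MMAP;
      req.count = 4;
      ioctl(fd, VIDIOC_REQBUFS, &req);

      void* planes[4] = {};
      for (unsigned i = 0; i < req.count && i < 4; ++i) {
        v4l2_buffer buf = {};
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        ioctl(fd, VIDIOC_QUERYBUF, &buf);
        planes[i] = mmap(nullptr, buf.length, PROT_READ | PROT_WRITE,
                         MAP_SHARED, fd, buf.m.offset);
        ioctl(fd, VIDIOC_QBUF, &buf);  // Hand the buffer to the driver.
      }

      v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      ioctl(fd, VIDIOC_STREAMON, &type);

      // Wait for a filled buffer, dequeue it, consume it, re-queue it.
      pollfd pfd = {fd, POLLIN, 0};
      poll(&pfd, 1, 1000);
      v4l2_buffer buf = {};
      buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      buf.memory = V4L2_MEMORY_MMAP;
      ioctl(fd, VIDIOC_DQBUF, &buf);
      // planes[buf.index] now holds buf.bytesused bytes of frame data.
      ioctl(fd, VIDIOC_QBUF, &buf);

      ioctl(fd, VIDIOC_STREAMOFF, &type);
      // (munmap() of planes[] omitted.)
      close(fd);
      return 0;
    }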