OLD | NEW |
| (Empty) |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include <errno.h> | |
6 #include <linux/videodev2.h> | |
7 #include <string.h> | |
8 #include <sys/mman.h> | |
9 | |
10 #include "base/big_endian.h" | |
11 #include "base/bind.h" | |
12 #include "base/thread_task_runner_handle.h" | |
13 #include "content/common/gpu/media/v4l2_jpeg_decode_accelerator.h" | |
14 #include "media/filters/jpeg_parser.h" | |
15 #include "third_party/libyuv/include/libyuv.h" | |
16 | |
// Issues |device_|->Ioctl(type, arg). On failure: logs the ioctl name
// (|type_name|), posts PLATFORM_FAILURE to the client, and returns |value|
// from the *enclosing* function.
#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_name)     \
  do {                                                               \
    if (device_->Ioctl(type, arg) != 0) {                            \
      PLOG(ERROR) << __func__ << "(): ioctl() failed: " << type_name; \
      PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);  \
      return value;                                                  \
    }                                                                \
  } while (0)

// Variant for functions returning void.
#define IOCTL_OR_ERROR_RETURN(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0), #type)

// Variant for functions returning bool.
#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type)

// Like the above but only logs and notifies; does not return early. Used in
// teardown paths where cleanup must continue regardless of ioctl failures.
#define IOCTL_OR_LOG_ERROR(type, arg)                              \
  do {                                                             \
    if (device_->Ioctl(type, arg) != 0) {                          \
      PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type;  \
      PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); \
    }                                                              \
  } while (0)

// Reads one byte from |reader| into |out|; returns false from the enclosing
// function on unexpected end-of-stream.
#define READ_U8_OR_RETURN_FALSE(reader, out)                             \
  do {                                                                   \
    uint8_t _out;                                                        \
    if (!reader.ReadU8(&_out)) {                                         \
      DVLOG(1)                                                           \
          << "Error in stream: unexpected EOS while trying to read " #out; \
      return false;                                                      \
    }                                                                    \
    *(out) = _out;                                                       \
  } while (0)

// Reads one big-endian uint16 from |reader| into |out|; returns false from
// the enclosing function on unexpected end-of-stream.
#define READ_U16_OR_RETURN_FALSE(reader, out)                            \
  do {                                                                   \
    uint16_t _out;                                                       \
    if (!reader.ReadU16(&_out)) {                                        \
      DVLOG(1)                                                           \
          << "Error in stream: unexpected EOS while trying to read " #out; \
      return false;                                                      \
    }                                                                    \
    *(out) = _out;                                                       \
  } while (0)
61 | |
62 namespace content { | |
63 | |
// Default DHT (Define Huffman Table) segment for 8-bit precision luminance
// and chrominance, built from the tables in JPEG standard section K.3.
// Strictly speaking the standard defines no "default" tables -- these are the
// "typical" tables -- but much software (e.g. ffmpeg) treats them as the
// de-facto defaults. Injected into streams that omit their own DHT segment.
const uint8_t kDefaultDhtSeg[] = {
    0xFF, 0xC4, 0x01, 0xA2, 0x00, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01,
    0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02,
    0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x01, 0x00, 0x03,
    0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09,
    0x0A, 0x0B, 0x10, 0x00, 0x02, 0x01, 0x03, 0x03, 0x02, 0x04, 0x03, 0x05,
    0x05, 0x04, 0x04, 0x00, 0x00, 0x01, 0x7D, 0x01, 0x02, 0x03, 0x00, 0x04,
    0x11, 0x05, 0x12, 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07, 0x22,
    0x71, 0x14, 0x32, 0x81, 0x91, 0xA1, 0x08, 0x23, 0x42, 0xB1, 0xC1, 0x15,
    0x52, 0xD1, 0xF0, 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0A, 0x16, 0x17,
    0x18, 0x19, 0x1A, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x34, 0x35, 0x36,
    0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A,
    0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66,
    0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A,
    0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94, 0x95,
    0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 0xA8,
    0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, 0xC2,
    0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4, 0xD5,
    0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7,
    0xE8, 0xE9, 0xEA, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9,
    0xFA, 0x11, 0x00, 0x02, 0x01, 0x02, 0x04, 0x04, 0x03, 0x04, 0x07, 0x05,
    0x04, 0x04, 0x00, 0x01, 0x02, 0x77, 0x00, 0x01, 0x02, 0x03, 0x11, 0x04,
    0x05, 0x21, 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71, 0x13, 0x22,
    0x32, 0x81, 0x08, 0x14, 0x42, 0x91, 0xA1, 0xB1, 0xC1, 0x09, 0x23, 0x33,
    0x52, 0xF0, 0x15, 0x62, 0x72, 0xD1, 0x0A, 0x16, 0x24, 0x34, 0xE1, 0x25,
    0xF1, 0x17, 0x18, 0x19, 0x1A, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x35, 0x36,
    0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A,
    0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66,
    0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A,
    0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94,
    0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7,
    0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA,
    0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4,
    0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7,
    0xE8, 0xE9, 0xEA, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA
};
106 | |
// Tracks one mmap()ed V4L2 buffer: its mapped address/length and whether the
// driver currently owns it.
V4L2JpegDecodeAccelerator::BufferRecord::BufferRecord()
    : address(nullptr), length(0), at_device(false) {
}

V4L2JpegDecodeAccelerator::BufferRecord::~BufferRecord() {
}

// Captures one decode request: the bitstream buffer id, the shared memory
// holding the JPEG data (|shm| adopts the buffer's handle; the second ctor
// argument presumably marks it read-only -- verify against base::SharedMemory),
// and the destination video frame.
V4L2JpegDecodeAccelerator::JobRecord::JobRecord(
    const media::BitstreamBuffer& bitstream_buffer,
    scoped_refptr<media::VideoFrame> video_frame)
    : bitstream_buffer_id(bitstream_buffer.id()),
      shm(bitstream_buffer, true),
      out_frame(video_frame) {}

V4L2JpegDecodeAccelerator::JobRecord::~JobRecord() {
}
123 | |
// Constructed on the child (owner) thread: |child_task_runner_| captures the
// current thread's runner. |weak_ptr_| is created here so it can later be
// handed to other threads for posting back to the child thread.
V4L2JpegDecodeAccelerator::V4L2JpegDecodeAccelerator(
    const scoped_refptr<V4L2Device>& device,
    const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
    : output_buffer_pixelformat_(0),
      child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
      io_task_runner_(io_task_runner),
      client_(nullptr),
      device_(device),
      decoder_thread_("V4L2JpegDecodeThread"),
      device_poll_thread_("V4L2JpegDecodeDevicePollThread"),
      input_streamon_(false),
      output_streamon_(false),
      weak_factory_(this) {
  weak_ptr_ = weak_factory_.GetWeakPtr();
}
139 | |
// Must run on the child thread. Posts DestroyTask to the decoder thread and
// blocks in Stop() until it has run, so all device/buffer teardown completes
// before members are destroyed. base::Unretained is safe: Stop() joins the
// thread before |this| goes away.
V4L2JpegDecodeAccelerator::~V4L2JpegDecodeAccelerator() {
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  if (decoder_thread_.IsRunning()) {
    decoder_task_runner_->PostTask(
        FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DestroyTask,
                              base::Unretained(this)));
    decoder_thread_.Stop();
  }
  weak_factory_.InvalidateWeakPtrs();
  DCHECK(!device_poll_thread_.IsRunning());
}
152 | |
// Runs on the decoder thread. Drops all queued/in-flight jobs, stops the
// device poll thread (which also STREAMOFFs both queues), then unmaps and
// releases the V4L2 buffers. Order matters: polling must stop before buffers
// are destroyed.
void V4L2JpegDecodeAccelerator::DestroyTask() {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  while (!input_jobs_.empty())
    input_jobs_.pop();
  while (!running_jobs_.empty())
    running_jobs_.pop();

  // Stop streaming and the device_poll_thread_.
  StopDevicePoll();

  DestroyInputBuffers();
  DestroyOutputBuffers();
}
166 | |
// Runs on the child thread: forwards a completed decode to the client.
void V4L2JpegDecodeAccelerator::VideoFrameReady(int32_t bitstream_buffer_id) {
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  client_->VideoFrameReady(bitstream_buffer_id);
}
171 | |
// Runs on the child thread: logs and forwards an error to the client.
// |bitstream_buffer_id| may be kInvalidBitstreamBufferId for device-level
// failures not tied to a specific buffer.
void V4L2JpegDecodeAccelerator::NotifyError(int32_t bitstream_buffer_id,
                                            Error error) {
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  LOG(ERROR) << "Notifying of error " << error << " for buffer id "
             << bitstream_buffer_id;
  client_->NotifyError(bitstream_buffer_id, error);
}
179 | |
// Callable from any thread: trampolines NotifyError onto the child thread.
// Uses |weak_ptr_| so the task is dropped if |this| is destroyed first.
void V4L2JpegDecodeAccelerator::PostNotifyError(
    int32_t bitstream_buffer_id,
    Error error) {
  child_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&V4L2JpegDecodeAccelerator::NotifyError, weak_ptr_,
                 bitstream_buffer_id, error));
}
188 | |
// Runs on the child thread. Verifies the device supports streaming M2M,
// subscribes to source-change events (fired when the driver parses a new
// JPEG's dimensions), starts the decoder thread, and kicks off device
// polling. Returns false on any failure; no client callback is made here.
bool V4L2JpegDecodeAccelerator::Initialize(Client* client) {
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  // Capabilities check.
  struct v4l2_capability caps;
  const __u32 kCapsRequired = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M;
  memset(&caps, 0, sizeof(caps));
  if (device_->Ioctl(VIDIOC_QUERYCAP, &caps) != 0) {
    PLOG(ERROR) << __func__ << ": ioctl() failed: VIDIOC_QUERYCAP";
    return false;
  }
  if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
    LOG(ERROR) << __func__ << ": VIDIOC_QUERYCAP, caps check failed: 0x"
               << std::hex << caps.capabilities;
    return false;
  }

  // Subscribe to the source change event.
  struct v4l2_event_subscription sub;
  memset(&sub, 0, sizeof(sub));
  sub.type = V4L2_EVENT_SOURCE_CHANGE;
  if (device_->Ioctl(VIDIOC_SUBSCRIBE_EVENT, &sub) != 0) {
    PLOG(ERROR) << __func__ << ": ioctl() failed: VIDIOC_SUBSCRIBE_EVENT";
    return false;
  }

  if (!decoder_thread_.Start()) {
    LOG(ERROR) << __func__ << ": decoder thread failed to start";
    return false;
  }
  client_ = client;
  decoder_task_runner_ = decoder_thread_.task_runner();

  // base::Unretained is safe: the destructor stops |decoder_thread_| before
  // |this| is destroyed.
  decoder_task_runner_->PostTask(
      FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::StartDevicePoll,
                            base::Unretained(this)));

  DVLOG(1) << "V4L2JpegDecodeAccelerator initialized.";
  return true;
}
229 | |
230 void V4L2JpegDecodeAccelerator::Decode( | |
231 const media::BitstreamBuffer& bitstream_buffer, | |
232 const scoped_refptr<media::VideoFrame>& video_frame) { | |
233 DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id() | |
234 << ", size=" << bitstream_buffer.size(); | |
235 DCHECK(io_task_runner_->BelongsToCurrentThread()); | |
236 | |
237 if (bitstream_buffer.id() < 0) { | |
238 LOG(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id(); | |
239 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) | |
240 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); | |
241 PostNotifyError(bitstream_buffer.id(), INVALID_ARGUMENT); | |
242 return; | |
243 } | |
244 | |
245 if (video_frame->format() != media::PIXEL_FORMAT_I420) { | |
246 PostNotifyError(bitstream_buffer.id(), UNSUPPORTED_JPEG); | |
247 return; | |
248 } | |
249 | |
250 std::unique_ptr<JobRecord> job_record( | |
251 new JobRecord(bitstream_buffer, video_frame)); | |
252 | |
253 decoder_task_runner_->PostTask( | |
254 FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DecodeTask, | |
255 base::Unretained(this), base::Passed(&job_record))); | |
256 } | |
257 | |
258 bool V4L2JpegDecodeAccelerator::IsSupported() { | |
259 v4l2_fmtdesc fmtdesc; | |
260 memset(&fmtdesc, 0, sizeof(fmtdesc)); | |
261 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | |
262 | |
263 for (; device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0; ++fmtdesc.index) { | |
264 if (fmtdesc.pixelformat == V4L2_PIX_FMT_JPEG) | |
265 return true; | |
266 } | |
267 return false; | |
268 } | |
269 | |
// Runs on the decoder thread. Maps the job's shared memory and queues the job
// for servicing. On map failure the client is notified (UNREADABLE_INPUT) and
// the job is dropped. |false| => this ServiceDeviceTask call is not driven by
// a pending device event.
void V4L2JpegDecodeAccelerator::DecodeTask(
    std::unique_ptr<JobRecord> job_record) {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  if (!job_record->shm.Map()) {
    PLOG(ERROR) << __func__ << ": could not map bitstream_buffer";
    PostNotifyError(job_record->bitstream_buffer_id, UNREADABLE_INPUT);
    return;
  }
  input_jobs_.push(make_linked_ptr(job_record.release()));

  ServiceDeviceTask(false);
}
282 | |
283 size_t V4L2JpegDecodeAccelerator::InputBufferQueuedCount() { | |
284 return input_buffer_map_.size() - free_input_buffers_.size(); | |
285 } | |
286 | |
287 size_t V4L2JpegDecodeAccelerator::OutputBufferQueuedCount() { | |
288 return output_buffer_map_.size() - free_output_buffers_.size(); | |
289 } | |
290 | |
// Runs on the decoder thread. Returns true when the next queued job cannot
// fit in the current input buffers -- either none exist yet, or the JPEG plus
// a possibly-injected default DHT segment exceeds the buffer length.
bool V4L2JpegDecodeAccelerator::ShouldRecreateInputBuffers() {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  if (input_jobs_.empty())
    return false;

  linked_ptr<JobRecord> job_record = input_jobs_.front();
  // Check input buffer size is enough
  return (input_buffer_map_.empty() ||
      (job_record->shm.size() + sizeof(kDefaultDhtSeg)) >
          input_buffer_map_.front().length);
}
302 | |
303 bool V4L2JpegDecodeAccelerator::RecreateInputBuffers() { | |
304 DVLOG(3) << __func__; | |
305 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | |
306 | |
307 // If running queue is not empty, we should wait until pending frames finish. | |
308 if (!running_jobs_.empty()) | |
309 return true; | |
310 | |
311 DestroyInputBuffers(); | |
312 | |
313 if (!CreateInputBuffers()) { | |
314 LOG(ERROR) << "Create input buffers failed."; | |
315 return false; | |
316 } | |
317 | |
318 return true; | |
319 } | |
320 | |
321 bool V4L2JpegDecodeAccelerator::RecreateOutputBuffers() { | |
322 DVLOG(3) << __func__; | |
323 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | |
324 | |
325 DestroyOutputBuffers(); | |
326 | |
327 if (!CreateOutputBuffers()) { | |
328 LOG(ERROR) << "Create output buffers failed."; | |
329 return false; | |
330 } | |
331 | |
332 return true; | |
333 } | |
334 | |
// Runs on the decoder thread. Sets the input (OUTPUT-queue) format sized for
// the next queued job, requests kBufferCount MMAP buffers from the driver,
// and mmap()s each one. On any failure notifies PLATFORM_FAILURE and returns
// false; DestroyInputBuffers is expected to clean up partial state later.
bool V4L2JpegDecodeAccelerator::CreateInputBuffers() {
  DVLOG(3) << __func__;
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!input_jobs_.empty());
  linked_ptr<JobRecord> job_record = input_jobs_.front();
  // The input image may miss huffman table. We didn't parse the image before,
  // so we create more to avoid the situation of not enough memory.
  // Reserve twice size to avoid recreating input buffer frequently.
  size_t reserve_size = (job_record->shm.size() + sizeof(kDefaultDhtSeg)) * 2;
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
  format.fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG;
  format.fmt.pix.sizeimage = reserve_size;
  format.fmt.pix.field = V4L2_FIELD_ANY;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = kBufferCount;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  DCHECK(input_buffer_map_.empty());
  // The driver may grant fewer buffers than requested; use reqbufs.count.
  input_buffer_map_.resize(reqbufs.count);

  for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
    free_input_buffers_.push_back(i);

    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    buffer.index = i;
    buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    buffer.memory = V4L2_MEMORY_MMAP;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
    void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
                                  MAP_SHARED, buffer.m.offset);
    if (address == MAP_FAILED) {
      PLOG(ERROR) << __func__ << ": mmap() failed";
      PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
      return false;
    }
    input_buffer_map_[i].address = address;
    input_buffer_map_[i].length = buffer.length;
  }

  return true;
}
385 | |
// Runs on the decoder thread. Sets the output (CAPTURE-queue) format to the
// coded size of the frame at the head of |running_jobs_|, requests and
// mmap()s kBufferCount MMAP buffers. Records the pixel format and coded size
// the driver actually accepted (it may differ from what was requested) for
// use during the output copy/conversion step.
bool V4L2JpegDecodeAccelerator::CreateOutputBuffers() {
  DVLOG(3) << __func__;
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);
  DCHECK(!running_jobs_.empty());
  linked_ptr<JobRecord> job_record = running_jobs_.front();

  size_t frame_size = media::VideoFrame::AllocationSize(
      media::PIXEL_FORMAT_I420, job_record->out_frame->coded_size());
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  format.fmt.pix.width = job_record->out_frame->coded_size().width();
  format.fmt.pix.height = job_record->out_frame->coded_size().height();
  format.fmt.pix.sizeimage = frame_size;
  format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
  format.fmt.pix.field = V4L2_FIELD_ANY;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
  // S_FMT may adjust the format; remember what the driver settled on.
  output_buffer_pixelformat_ = format.fmt.pix.pixelformat;
  output_buffer_coded_size_.SetSize(format.fmt.pix.width,
                                    format.fmt.pix.height);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = kBufferCount;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  DCHECK(output_buffer_map_.empty());
  output_buffer_map_.resize(reqbufs.count);

  media::VideoPixelFormat output_format =
      V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_);

  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    free_output_buffers_.push_back(i);

    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    buffer.index = i;
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);

    // Sanity check: the driver's buffer must hold a full frame in the
    // negotiated format at the negotiated size.
    DCHECK_GE(buffer.length,
              media::VideoFrame::AllocationSize(
                  output_format,
                  gfx::Size(format.fmt.pix.width, format.fmt.pix.height)));

    void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
                                  MAP_SHARED, buffer.m.offset);
    if (address == MAP_FAILED) {
      PLOG(ERROR) << __func__ << ": mmap() failed";
      PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
      return false;
    }
    output_buffer_map_[i].address = address;
    output_buffer_map_[i].length = buffer.length;
  }

  return true;
}
449 | |
// Runs on the decoder thread. STREAMOFFs (if needed), unmaps, and releases
// all input buffers (REQBUFS with count 0 frees them in the driver).
// NOTE(review): the DCHECK below asserts streaming is already off, yet the
// following `if` defensively handles the streaming-on case anyway (DCHECK is
// a no-op in release builds) -- confirm which invariant is intended.
void V4L2JpegDecodeAccelerator::DestroyInputBuffers() {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);

  if (input_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type);
    input_streamon_ = false;
  }

  for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
    BufferRecord& input_record = input_buffer_map_[i];
    device_->Munmap(input_record.address, input_record.length);
  }

  // Freeing uses IOCTL_OR_LOG_ERROR so cleanup continues even on failure.
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);

  input_buffer_map_.clear();
  free_input_buffers_.clear();
}
475 | |
// Runs on the decoder thread. Mirror of DestroyInputBuffers for the CAPTURE
// queue: STREAMOFF (defensively -- see the same DCHECK/if tension as in
// DestroyInputBuffers), unmap every buffer, then free them via REQBUFS(0).
void V4L2JpegDecodeAccelerator::DestroyOutputBuffers() {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);

  if (output_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type);
    output_streamon_ = false;
  }

  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    BufferRecord& output_record = output_buffer_map_[i];
    device_->Munmap(output_record.address, output_record.length);
  }

  // Freeing uses IOCTL_OR_LOG_ERROR so cleanup continues even on failure.
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);

  output_buffer_map_.clear();
  free_output_buffers_.clear();
}
501 | |
// Runs on the device poll thread. Blocks in Poll() until the device is ready
// or an event is pending, then bounces to ServiceDeviceTask on the decoder
// thread -- decoder state must never be touched from this thread.
void V4L2JpegDecodeAccelerator::DevicePollTask() {
  DCHECK(device_poll_task_runner_->BelongsToCurrentThread());

  bool event_pending;
  if (!device_->Poll(true, &event_pending)) {
    PLOG(ERROR) << __func__ << ": Poll device error.";
    PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
    return;
  }

  // All processing should happen on ServiceDeviceTask(), since we shouldn't
  // touch decoder state from this thread.
  decoder_task_runner_->PostTask(
      FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::ServiceDeviceTask,
                            base::Unretained(this), event_pending));
}
518 | |
// Runs on the decoder thread. Pops one pending V4L2 event and returns true
// only if it is a source-change event with a resolution or pixel-format
// change (the trigger for recreating output buffers). Any other outcome --
// dequeue failure, unexpected event type, or unexpected change flags -- is
// treated as a platform failure.
bool V4L2JpegDecodeAccelerator::DequeueSourceChangeEvent() {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());

  struct v4l2_event ev;
  memset(&ev, 0, sizeof(ev));

  if (device_->Ioctl(VIDIOC_DQEVENT, &ev) == 0) {
    if (ev.type == V4L2_EVENT_SOURCE_CHANGE) {
      DVLOG(3) << __func__ << ": got source change event: "
               << ev.u.src_change.changes;
      if (ev.u.src_change.changes &
          (V4L2_EVENT_SRC_CH_RESOLUTION | V4L2_EVENT_SRC_CH_PIXELFORMAT)) {
        return true;
      }
      LOG(ERROR) << __func__ << ": unexpected source change event.";
    } else {
      LOG(ERROR) << __func__ << ": got an event (" << ev.type
                 << ") we haven't subscribed to.";
    }
  } else {
    LOG(ERROR) << __func__ << ": dequeue event failed.";
  }
  // All failure paths converge here.
  PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
  return false;
}
544 | |
// Runs on the decoder thread; the central state machine step. Invoked both
// after DecodeTask queues new work (|event_pending| = false) and after
// DevicePollTask observes device readiness. Order: drain completed buffers,
// recreate input buffers if the next job needs bigger ones, handle a
// source-change event by recreating output buffers, enqueue new work, and
// re-arm polling while jobs remain in flight.
void V4L2JpegDecodeAccelerator::ServiceDeviceTask(bool event_pending) {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  // If DestroyTask() shuts |device_poll_thread_| down, we should early-out.
  if (!device_poll_thread_.IsRunning())
    return;

  if (!running_jobs_.empty())
    Dequeue();

  if (ShouldRecreateInputBuffers() && !RecreateInputBuffers())
    return;

  if (event_pending) {
    if (!DequeueSourceChangeEvent()) return;
    if (!RecreateOutputBuffers()) return;
  }

  EnqueueInput();
  EnqueueOutput();

  if (!running_jobs_.empty()) {
    // Keep polling while the device holds work; base::Unretained is safe
    // because DestroyTask stops polling before |this| dies.
    device_poll_task_runner_->PostTask(
        FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DevicePollTask,
                              base::Unretained(this)));
  }

  DVLOG(2) << __func__ << ": buffer counts: INPUT["
           << input_jobs_.size() << "] => DEVICE["
           << free_input_buffers_.size() << "/"
           << input_buffer_map_.size() << "->"
           << free_output_buffers_.size() << "/"
           << output_buffer_map_.size() << "]";
}
578 | |
// Runs on the decoder thread. Moves queued jobs into free input buffers and
// QBUFs them, then turns streaming on once at least one buffer is queued.
void V4L2JpegDecodeAccelerator::EnqueueInput() {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  while (!input_jobs_.empty() && !free_input_buffers_.empty()) {
    // If input buffers are required to re-create, do not enqueue input record
    // until all pending frames are handled by device.
    if (ShouldRecreateInputBuffers())
      break;
    if (!EnqueueInputRecord())
      return;
  }
  // Check here because we cannot STREAMON before QBUF in earlier kernel.
  // (kernel version < 3.14)
  if (!input_streamon_ && InputBufferQueuedCount()) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
    input_streamon_ = true;
  }
}
597 | |
// Runs on the decoder thread. QBUFs one output buffer per in-flight job that
// does not already have one, then turns streaming on once a buffer is queued.
void V4L2JpegDecodeAccelerator::EnqueueOutput() {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  // Output record can be enqueued because the output coded sizes of the frames
  // currently in the pipeline are all the same.
  while (running_jobs_.size() > OutputBufferQueuedCount() &&
      !free_output_buffers_.empty()) {
    if (!EnqueueOutputRecord())
      return;
  }
  // Check here because we cannot STREAMON before QBUF in earlier kernel.
  // (kernel version < 3.14)
  if (!output_streamon_ && OutputBufferQueuedCount()) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
    output_streamon_ = true;
  }
}
615 | |
// Copies/converts one decoded frame from the V4L2 output buffer into the
// client's I420 frame via libyuv::ConvertToI420 (which degrades to a plain
// copy when |src_pixelformat| is already I420). Returns false on conversion
// failure. Note libyuv returns 0 on success, so a truthy return is an error.
static bool CopyOutputImage(const uint32_t src_pixelformat,
                            const void* src_addr,
                            const gfx::Size& src_coded_size,
                            const scoped_refptr<media::VideoFrame>& dst_frame) {
  media::VideoPixelFormat format =
      V4L2Device::V4L2PixFmtToVideoPixelFormat(src_pixelformat);
  size_t src_size = media::VideoFrame::AllocationSize(format, src_coded_size);
  uint8_t* dst_y = dst_frame->data(media::VideoFrame::kYPlane);
  uint8_t* dst_u = dst_frame->data(media::VideoFrame::kUPlane);
  uint8_t* dst_v = dst_frame->data(media::VideoFrame::kVPlane);
  size_t dst_y_stride = dst_frame->stride(media::VideoFrame::kYPlane);
  size_t dst_u_stride = dst_frame->stride(media::VideoFrame::kUPlane);
  size_t dst_v_stride = dst_frame->stride(media::VideoFrame::kVPlane);

  // If the source format is I420, ConvertToI420 will simply copy the frame.
  // The const_cast is required by libyuv's non-const signature; the source is
  // not modified.
  if (libyuv::ConvertToI420(static_cast<uint8_t*>(const_cast<void*>(src_addr)),
                            src_size,
                            dst_y, dst_y_stride,
                            dst_u, dst_u_stride,
                            dst_v, dst_v_stride,
                            0, 0,
                            src_coded_size.width(),
                            src_coded_size.height(),
                            dst_frame->coded_size().width(),
                            dst_frame->coded_size().height(),
                            libyuv::kRotate0,
                            src_pixelformat)) {
    LOG(ERROR) << "ConvertToI420 failed. Source format: " << src_pixelformat;
    return false;
  }
  return true;
}
648 | |
// Runs on the decoder thread. Drains both V4L2 queues:
//  1. Input (OUTPUT-queue) buffers the driver has consumed are recycled; a
//     buffer flagged with V4L2_BUF_FLAG_ERROR drops its job from
//     |running_jobs_| and reports UNSUPPORTED_JPEG.
//  2. Output (CAPTURE-queue) buffers are matched FIFO against
//     |running_jobs_|; on success the decoded image is copied/converted into
//     the client's frame and VideoFrameReady is posted to the child thread.
// EAGAIN from VIDIOC_DQBUF just means the queue is drained for now.
void V4L2JpegDecodeAccelerator::Dequeue() {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());

  // Dequeue completed input (VIDEO_OUTPUT) buffers,
  // and recycle to the free list.
  struct v4l2_buffer dqbuf;
  while (InputBufferQueuedCount() > 0) {
    DCHECK(input_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      PLOG(ERROR) << "ioctl() failed: input buffer VIDIOC_DQBUF failed.";
      PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
      return;
    }
    BufferRecord& input_record = input_buffer_map_[dqbuf.index];
    DCHECK(input_record.at_device);
    input_record.at_device = false;
    free_input_buffers_.push_back(dqbuf.index);

    if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) {
      DVLOG(1) << "Dequeue input buffer error.";
      PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG);
      // Drop the failed job so output matching below stays aligned.
      running_jobs_.pop();
    }
  }

  // Dequeue completed output (VIDEO_CAPTURE) buffers, recycle to the free list.
  // Return the finished buffer to the client via the job ready callback.
  // If dequeued input buffer has an error, the error frame has removed from
  // |running_jobs_|. We only have to dequeue output buffer when we actually
  // have pending frames in |running_jobs_| and also enqueued output buffers.
  while (!running_jobs_.empty() && OutputBufferQueuedCount() > 0) {
    DCHECK(output_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    // From experiments, using MMAP and memory copy is still faster than
    // USERPTR. Also, client doesn't need to consider the buffer alignment and
    // JpegDecodeAccelerator API will be simpler.
    dqbuf.memory = V4L2_MEMORY_MMAP;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      PLOG(ERROR) << "ioctl() failed: output buffer VIDIOC_DQBUF failed.";
      PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
      return;
    }
    BufferRecord& output_record = output_buffer_map_[dqbuf.index];
    DCHECK(output_record.at_device);
    output_record.at_device = false;
    free_output_buffers_.push_back(dqbuf.index);

    // Jobs are always processed in FIFO order.
    linked_ptr<JobRecord> job_record = running_jobs_.front();
    running_jobs_.pop();

    if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) {
      DVLOG(1) << "Dequeue output buffer error.";
      PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG);
    } else {
      // Copy the decoded data from output buffer to the buffer provided by the
      // client. Do format conversion when output format is not
      // V4L2_PIX_FMT_YUV420.
      if (!CopyOutputImage(output_buffer_pixelformat_, output_record.address,
                           output_buffer_coded_size_, job_record->out_frame)) {
        PostNotifyError(job_record->bitstream_buffer_id, PLATFORM_FAILURE);
        return;
      }

      DVLOG(3) << "Decoding finished, returning bitstream buffer, id="
               << job_record->bitstream_buffer_id;

      child_task_runner_->PostTask(
          FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::VideoFrameReady,
                                weak_ptr_, job_record->bitstream_buffer_id));
    }
  }
}
734 | |
// Copies the JPEG at |input_ptr| into |output_ptr|, inserting kDefaultDhtSeg
// immediately before the SOS marker if no DHT segment was seen. Walks the
// marker segments between SOI and the first of DHT/SOS, copying each one
// verbatim; anything after the stopping marker (including entropy-coded data)
// is copied unparsed at the end. Returns false on malformed input. The caller
// must guarantee output_size >= input_size + sizeof(kDefaultDhtSeg).
static bool AddHuffmanTable(const void* input_ptr,
                            size_t input_size,
                            void* output_ptr,
                            size_t output_size) {
  DCHECK(input_ptr);
  DCHECK(output_ptr);
  DCHECK_LE((input_size + sizeof(kDefaultDhtSeg)), output_size);

  base::BigEndianReader reader(static_cast<const char*>(input_ptr), input_size);
  bool has_marker_dht = false;
  bool has_marker_sos = false;
  uint8_t marker1, marker2;
  READ_U8_OR_RETURN_FALSE(reader, &marker1);
  READ_U8_OR_RETURN_FALSE(reader, &marker2);
  if (marker1 != media::JPEG_MARKER_PREFIX || marker2 != media::JPEG_SOI) {
    DLOG(ERROR) << __func__ << ": The input is not a Jpeg";
    return false;
  }

  // copy SOI marker (0xFF, 0xD8)
  memcpy(output_ptr, input_ptr, 2);
  size_t current_offset = 2;

  // Stop at whichever comes first: a DHT (no insertion needed) or the SOS
  // (insert the default DHT just before it).
  while (!has_marker_sos && !has_marker_dht) {
    const char* start_addr = reader.ptr();
    READ_U8_OR_RETURN_FALSE(reader, &marker1);
    if (marker1 != media::JPEG_MARKER_PREFIX) {
      DLOG(ERROR) << __func__ << ": marker1 != 0xFF";
      return false;
    }
    do {
      READ_U8_OR_RETURN_FALSE(reader, &marker2);
    } while (marker2 == media::JPEG_MARKER_PREFIX);  // skip fill bytes

    uint16_t size;
    READ_U16_OR_RETURN_FALSE(reader, &size);
    // The size includes the size field itself.
    if (size < sizeof(size)) {
      DLOG(ERROR) << __func__ << ": Ill-formed JPEG. Segment size (" << size
                  << ") is smaller than size field (" << sizeof(size) << ")";
      return false;
    }
    size -= sizeof(size);

    switch (marker2) {
      case media::JPEG_DHT: {
        has_marker_dht = true;
        break;
      }
      case media::JPEG_SOS: {
        if (!has_marker_dht) {
          // Splice in the default Huffman tables ahead of the SOS segment.
          memcpy(static_cast<uint8_t*>(output_ptr) + current_offset,
                 kDefaultDhtSeg, sizeof(kDefaultDhtSeg));
          current_offset += sizeof(kDefaultDhtSeg);
        }
        has_marker_sos = true;
        break;
      }
      default:
        break;
    }

    if (!reader.Skip(size)) {
      DLOG(ERROR) << __func__ << ": Ill-formed JPEG. Remaining size ("
                  << reader.remaining()
                  << ") is smaller than header specified (" << size << ")";
      return false;
    }

    // Copy the whole segment (marker + length + payload) verbatim.
    size_t segment_size = static_cast<size_t>(reader.ptr() - start_addr);
    memcpy(static_cast<uint8_t*>(output_ptr) + current_offset, start_addr,
           segment_size);
    current_offset += segment_size;
  }
  // Copy everything after the stopping marker (scan data, trailing segments).
  if (reader.remaining()) {
    memcpy(static_cast<uint8_t*>(output_ptr) + current_offset, reader.ptr(),
           reader.remaining());
  }
  return true;
}
815 | |
816 bool V4L2JpegDecodeAccelerator::EnqueueInputRecord() { | |
817 DCHECK(!input_jobs_.empty()); | |
818 DCHECK(!free_input_buffers_.empty()); | |
819 | |
820 // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame. | |
821 linked_ptr<JobRecord> job_record = input_jobs_.front(); | |
822 input_jobs_.pop(); | |
823 const int index = free_input_buffers_.back(); | |
824 BufferRecord& input_record = input_buffer_map_[index]; | |
825 DCHECK(!input_record.at_device); | |
826 | |
827 // It will add default huffman segment if it's missing. | |
828 if (!AddHuffmanTable(job_record->shm.memory(), job_record->shm.size(), | |
829 input_record.address, input_record.length)) { | |
830 PostNotifyError(job_record->bitstream_buffer_id, PARSE_JPEG_FAILED); | |
831 return false; | |
832 } | |
833 | |
834 struct v4l2_buffer qbuf; | |
835 memset(&qbuf, 0, sizeof(qbuf)); | |
836 qbuf.index = index; | |
837 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | |
838 qbuf.memory = V4L2_MEMORY_MMAP; | |
839 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); | |
840 input_record.at_device = true; | |
841 running_jobs_.push(job_record); | |
842 free_input_buffers_.pop_back(); | |
843 | |
844 DVLOG(3) << __func__ | |
845 << ": enqueued frame id=" << job_record->bitstream_buffer_id | |
846 << " to device."; | |
847 return true; | |
848 } | |
849 | |
850 bool V4L2JpegDecodeAccelerator::EnqueueOutputRecord() { | |
851 DCHECK(!free_output_buffers_.empty()); | |
852 | |
853 // Enqueue an output (VIDEO_CAPTURE) buffer. | |
854 const int index = free_output_buffers_.back(); | |
855 BufferRecord& output_record = output_buffer_map_[index]; | |
856 DCHECK(!output_record.at_device); | |
857 struct v4l2_buffer qbuf; | |
858 memset(&qbuf, 0, sizeof(qbuf)); | |
859 qbuf.index = index; | |
860 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | |
861 qbuf.memory = V4L2_MEMORY_MMAP; | |
862 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); | |
863 output_record.at_device = true; | |
864 free_output_buffers_.pop_back(); | |
865 return true; | |
866 } | |
867 | |
868 void V4L2JpegDecodeAccelerator::StartDevicePoll() { | |
869 DVLOG(3) << __func__ << ": starting device poll"; | |
870 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | |
871 DCHECK(!device_poll_thread_.IsRunning()); | |
872 | |
873 if (!device_poll_thread_.Start()) { | |
874 LOG(ERROR) << __func__ << ": Device thread failed to start"; | |
875 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); | |
876 return; | |
877 } | |
878 device_poll_task_runner_ = device_poll_thread_.task_runner(); | |
879 } | |
880 | |
881 bool V4L2JpegDecodeAccelerator::StopDevicePoll() { | |
882 DVLOG(3) << __func__ << ": stopping device poll"; | |
883 // Signal the DevicePollTask() to stop, and stop the device poll thread. | |
884 if (!device_->SetDevicePollInterrupt()) { | |
885 LOG(ERROR) << __func__ << ": SetDevicePollInterrupt failed."; | |
886 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); | |
887 return false; | |
888 } | |
889 | |
890 device_poll_thread_.Stop(); | |
891 | |
892 // Clear the interrupt now, to be sure. | |
893 if (!device_->ClearDevicePollInterrupt()) | |
894 return false; | |
895 | |
896 return true; | |
897 } | |
898 | |
899 } // namespace content | |
OLD | NEW |