OLD | NEW |
---|---|
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <fcntl.h> | 5 #include <fcntl.h> |
6 #include <linux/videodev2.h> | 6 #include <linux/videodev2.h> |
7 #include <poll.h> | 7 #include <poll.h> |
8 #include <sys/eventfd.h> | 8 #include <sys/eventfd.h> |
9 #include <sys/ioctl.h> | 9 #include <sys/ioctl.h> |
10 #include <sys/mman.h> | 10 #include <sys/mman.h> |
11 | 11 |
12 #include "base/bind.h" | 12 #include "base/bind.h" |
13 #include "base/bind_helpers.h" | 13 #include "base/bind_helpers.h" |
14 #include "base/callback.h" | 14 #include "base/callback.h" |
15 #include "base/message_loop/message_loop_proxy.h" | 15 #include "base/message_loop/message_loop_proxy.h" |
16 #include "base/numerics/safe_conversions.h" | 16 #include "base/numerics/safe_conversions.h" |
17 #include "content/common/gpu/media/v4l2_image_processor.h" | 17 #include "content/common/gpu/media/v4l2_jpeg_decode_accelerator.h" |
18 #include "media/base/bind_to_current_loop.h" | 18 #include "media/base/bind_to_current_loop.h" |
19 #include "media/base/video_frame.h" | |
19 | 20 |
20 #define NOTIFY_ERROR() \ | 21 //#undef DVLOG |
21 do { \ | 22 //#define DVLOG VLOG |
22 LOG(ERROR) << "calling NotifyError()"; \ | |
23 NotifyError(); \ | |
24 } while (0) | |
25 | 23 |
26 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value) \ | 24 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value) \ |
27 do { \ | 25 do { \ |
28 if (device_->Ioctl(type, arg) != 0) { \ | 26 if (device_->Ioctl(type, arg) != 0) { \ |
29 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ | 27 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ |
30 return value; \ | 28 return value; \ |
31 } \ | 29 } \ |
32 } while (0) | 30 } while (0) |
33 | 31 |
34 #define IOCTL_OR_ERROR_RETURN(type, arg) \ | 32 #define IOCTL_OR_ERROR_RETURN(type, arg) \ |
35 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0)) | 33 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0)) |
36 | 34 |
37 #define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \ | 35 #define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \ |
38 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false) | 36 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false) |
39 | 37 |
40 #define IOCTL_OR_LOG_ERROR(type, arg) \ | 38 #define IOCTL_OR_LOG_ERROR(type, arg) \ |
41 do { \ | 39 do { \ |
42 if (device_->Ioctl(type, arg) != 0) \ | 40 if (device_->Ioctl(type, arg) != 0) \ |
43 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ | 41 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ |
44 } while (0) | 42 } while (0) |
45 | 43 |
46 namespace content { | 44 namespace content { |
47 | 45 |
48 V4L2ImageProcessor::InputRecord::InputRecord() : at_device(false) { | 46 V4L2JpegDecodeAccelerator::InputRecord::InputRecord() : at_device(false) { |
49 } | 47 } |
50 | 48 |
51 V4L2ImageProcessor::InputRecord::~InputRecord() { | 49 V4L2JpegDecodeAccelerator::InputRecord::~InputRecord() { |
52 } | 50 } |
53 | 51 |
54 V4L2ImageProcessor::OutputRecord::OutputRecord() | 52 V4L2JpegDecodeAccelerator::OutputRecord::OutputRecord() |
55 : at_device(false), at_client(false) { | 53 : address(nullptr), length(0), at_device(false) { |
56 } | 54 } |
57 | 55 |
58 V4L2ImageProcessor::OutputRecord::~OutputRecord() { | 56 V4L2JpegDecodeAccelerator::OutputRecord::~OutputRecord() { |
59 } | 57 } |
60 | 58 |
61 V4L2ImageProcessor::JobRecord::JobRecord() { | 59 V4L2JpegDecodeAccelerator::JobRecord::JobRecord( |
60 media::BitstreamBuffer bitstream_buffer, | |
61 scoped_refptr<media::VideoFrame> video_frame) | |
62 : bitstream_buffer(bitstream_buffer), frame(video_frame) { | |
62 } | 63 } |
63 | 64 |
64 V4L2ImageProcessor::JobRecord::~JobRecord() { | 65 V4L2JpegDecodeAccelerator::JobRecord::~JobRecord() { |
65 } | 66 } |
66 | 67 |
67 V4L2ImageProcessor::V4L2ImageProcessor(const scoped_refptr<V4L2Device>& device) | 68 V4L2JpegDecodeAccelerator::V4L2JpegDecodeAccelerator( |
68 : input_format_(media::VideoFrame::UNKNOWN), | 69 const scoped_refptr<V4L2Device>& device, |
69 output_format_(media::VideoFrame::UNKNOWN), | 70 const scoped_refptr<base::MessageLoopProxy>& io_message_loop_proxy) |
70 input_format_fourcc_(0), | 71 : child_message_loop_proxy_(base::MessageLoopProxy::current()), |
71 output_format_fourcc_(0), | 72 io_message_loop_proxy_(io_message_loop_proxy), |
72 input_planes_count_(0), | |
73 output_planes_count_(0), | |
74 child_message_loop_proxy_(base::MessageLoopProxy::current()), | |
75 device_(device), | 73 device_(device), |
76 device_thread_("V4L2ImageProcessorThread"), | 74 device_thread_("V4L2JpegDecodeThread"), |
77 device_poll_thread_("V4L2ImageProcessorDevicePollThread"), | 75 device_poll_thread_("V4L2JpegDecodeDevicePollThread"), |
78 input_streamon_(false), | 76 input_streamon_(false), |
79 input_buffer_queued_count_(0), | 77 input_buffer_queued_count_(0), |
80 output_streamon_(false), | 78 output_streamon_(false), |
81 output_buffer_queued_count_(0), | 79 output_buffer_queued_count_(0), |
82 device_weak_factory_(this) { | 80 device_weak_factory_(this) { |
81 device_weak_ = device_weak_factory_.GetWeakPtr(); | |
83 } | 82 } |
84 | 83 |
85 V4L2ImageProcessor::~V4L2ImageProcessor() { | 84 V4L2JpegDecodeAccelerator::~V4L2JpegDecodeAccelerator() { |
86 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | |
87 DCHECK(!device_thread_.IsRunning()); | 85 DCHECK(!device_thread_.IsRunning()); |
88 DCHECK(!device_poll_thread_.IsRunning()); | 86 DCHECK(!device_poll_thread_.IsRunning()); |
89 | |
90 DestroyInputBuffers(); | 87 DestroyInputBuffers(); |
91 DestroyOutputBuffers(); | 88 DestroyOutputBuffers(); |
92 } | 89 } |
93 | 90 |
94 void V4L2ImageProcessor::NotifyError() { | 91 void V4L2JpegDecodeAccelerator::NotifyError(int32_t bitstream_buffer_id, |
95 if (!child_message_loop_proxy_->BelongsToCurrentThread()) | 92 Error error) { |
96 child_message_loop_proxy_->PostTask(FROM_HERE, error_cb_); | 93 if (!child_message_loop_proxy_->BelongsToCurrentThread()) { |
wuchengli 2015/05/25 10:29:23: Don't use this pattern. See piman's comment in htt
henryhsu 2015/06/05 03:28:56: Done.
97 else | 94 child_message_loop_proxy_->PostTask( |
98 error_cb_.Run(); | 95 FROM_HERE, |
96 base::Bind(&V4L2JpegDecodeAccelerator::NotifyError, | |
97 device_weak_, bitstream_buffer_id, error)); | |
98 return; | |
99 } | |
100 LOG(ERROR) << "Notifying of error " << error << " for buffer id " | |
101 << bitstream_buffer_id; | |
102 if (client_) { | |
103 client_->NotifyError(bitstream_buffer_id, error); | |
104 client_ptr_factory_.reset(); | |
105 } | |
99 } | 106 } |
100 | 107 |
101 bool V4L2ImageProcessor::Initialize(media::VideoFrame::Format input_format, | 108 bool V4L2JpegDecodeAccelerator::Initialize(Client* client) { |
102 media::VideoFrame::Format output_format, | 109 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
103 gfx::Size input_visible_size, | |
104 gfx::Size output_visible_size, | |
105 gfx::Size output_allocated_size, | |
106 const base::Closure& error_cb) { | |
107 DCHECK(!error_cb.is_null()); | |
108 error_cb_ = error_cb; | |
109 | 110 |
110 // TODO(posciak): Replace Exynos-specific format/parameter hardcoding in this | 111 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); |
111 // class with proper capability enumeration. | 112 client_ = client_ptr_factory_->GetWeakPtr(); |
kcwu 2015/05/26 10:47:23: No need to use weak pointer for |client|. |client|
henryhsu 2015/06/05 03:28:56: Done.
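A minimal sketch of the raw-pointer alternative kcwu suggests above, assuming the Client is guaranteed by the caller to outlive this object; the simplified member and body are illustrative, not the patch as written:

// Sketch only: if the Client outlives the accelerator, a plain pointer is
// enough and client_ptr_factory_ can be dropped.
bool V4L2JpegDecodeAccelerator::Initialize(Client* client) {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  client_ = client;  // Raw Client* member; caller guarantees the lifetime.
  // ... remainder of Initialize() unchanged ...
  return true;
}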
112 DCHECK_EQ(input_format, media::VideoFrame::I420); | |
113 DCHECK_EQ(output_format, media::VideoFrame::NV12); | |
114 | |
115 input_format_ = input_format; | |
116 output_format_ = output_format; | |
117 input_format_fourcc_ = V4L2Device::VideoFrameFormatToV4L2PixFmt(input_format); | |
118 output_format_fourcc_ = | |
119 V4L2Device::VideoFrameFormatToV4L2PixFmt(output_format); | |
120 | |
121 if (!input_format_fourcc_ || !output_format_fourcc_) { | |
122 LOG(ERROR) << "Unrecognized format(s)"; | |
123 return false; | |
124 } | |
125 | |
126 input_visible_size_ = input_visible_size; | |
127 output_visible_size_ = output_visible_size; | |
128 output_allocated_size_ = output_allocated_size; | |
129 | |
130 input_planes_count_ = media::VideoFrame::NumPlanes(input_format); | |
131 DCHECK_LE(input_planes_count_, static_cast<size_t>(VIDEO_MAX_PLANES)); | |
132 output_planes_count_ = media::VideoFrame::NumPlanes(output_format); | |
133 DCHECK_LE(output_planes_count_, static_cast<size_t>(VIDEO_MAX_PLANES)); | |
134 | 113 |
135 // Capabilities check. | 114 // Capabilities check. |
136 struct v4l2_capability caps; | 115 struct v4l2_capability caps; |
137 memset(&caps, 0, sizeof(caps)); | 116 // TODO(henryhsu): Do we need V4L2_CAP_STREAMING capability? |
138 const __u32 kCapsRequired = V4L2_CAP_VIDEO_CAPTURE_MPLANE | | 117 const __u32 kCapsRequired = V4L2_CAP_VIDEO_CAPTURE | |
wuchengli 2015/05/25 10:29:23: We should only need V4L2_CAP_VIDEO_M2M_MPLANE | V4
henryhsu 2015/06/05 03:28:55: s5p-jpeg doesn't support multiple planar. I think
139 V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_STREAMING; | 118 V4L2_CAP_VIDEO_OUTPUT; |
140 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps); | 119 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps); |
141 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { | 120 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
142 LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: " | 121 LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP" |
143 "caps check failed: 0x" << std::hex << caps.capabilities; | 122 ", caps check failed: 0x" << std::hex << caps.capabilities; |
144 return false; | 123 return false; |
145 } | 124 } |
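Following the capability discussion above, a hedged sketch of a check that accepts either the single-planar flags this patch uses or their multi-planar counterparts; which set a particular driver (e.g. s5p-jpeg) actually reports is an assumption here:

// Sketch only: accept either single-planar or multi-planar M2M capabilities.
struct v4l2_capability caps;
memset(&caps, 0, sizeof(caps));
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
const __u32 kSinglePlanar = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT;
const __u32 kMultiPlanar =
    V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_OUTPUT_MPLANE;
if ((caps.capabilities & kSinglePlanar) != kSinglePlanar &&
    (caps.capabilities & kMultiPlanar) != kMultiPlanar) {
  LOG(ERROR) << "Initialize(): no suitable M2M caps: 0x" << std::hex
             << caps.capabilities;
  return false;
}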
146 | 125 |
147 if (!CreateInputBuffers() || !CreateOutputBuffers()) | |
148 return false; | |
149 | |
150 if (!device_thread_.Start()) { | 126 if (!device_thread_.Start()) { |
151 LOG(ERROR) << "Initialize(): encoder thread failed to start"; | 127 LOG(ERROR) << "Initialize(): encoder thread failed to start"; |
152 return false; | 128 return false; |
153 } | 129 } |
154 | 130 |
155 // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here. | 131 // StartDevicePoll will NotifyError on failure, so IgnoreResult is fine here. |
kcwu 2015/05/26 10:47:22: Nobody use the return value of StartDevicePoll. Ho
henryhsu 2015/06/05 03:28:55: Done.
156 device_thread_.message_loop()->PostTask( | 132 device_thread_.message_loop()->PostTask( |
157 FROM_HERE, | 133 FROM_HERE, |
158 base::Bind(base::IgnoreResult(&V4L2ImageProcessor::StartDevicePoll), | 134 base::Bind( |
159 base::Unretained(this))); | 135 base::IgnoreResult(&V4L2JpegDecodeAccelerator::StartDevicePoll), |
136 base::Unretained(this))); | |
160 | 137 |
161 DVLOG(1) << "V4L2ImageProcessor initialized for " | 138 DVLOG(1) << "V4L2JpegDecodeAccelerator initialized."; |
162 << " input_format:" | |
163 << media::VideoFrame::FormatToString(input_format) | |
164 << ", output_format:" | |
165 << media::VideoFrame::FormatToString(output_format) | |
166 << ", input_visible_size: " << input_visible_size.ToString() | |
167 << ", input_allocated_size: " << input_allocated_size_.ToString() | |
168 << ", output_visible_size: " << output_visible_size.ToString() | |
169 << ", output_allocated_size: " << output_allocated_size.ToString(); | |
170 | 139 |
171 return true; | 140 return true; |
172 } | 141 } |
173 | 142 |
174 void V4L2ImageProcessor::Process(const scoped_refptr<media::VideoFrame>& frame, | 143 void V4L2JpegDecodeAccelerator::Decode( |
175 const FrameReadyCB& cb) { | 144 const media::BitstreamBuffer& bitstream_buffer, |
176 DVLOG(3) << __func__ << ": ts=" << frame->timestamp().InMilliseconds(); | 145 const scoped_refptr<media::VideoFrame>& video_frame) { |
146 DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id() | |
147 << ", size=" << bitstream_buffer.size(); | |
148 DCHECK(io_message_loop_proxy_->BelongsToCurrentThread()); | |
149 // TODO(henryhsu): DCHECK or DCHECK | |
150 DCHECK_EQ(video_frame->format(), media::VideoFrame::I420); | |
177 | 151 |
178 scoped_ptr<JobRecord> job_record(new JobRecord()); | 152 scoped_ptr<JobRecord> job_record( |
179 job_record->frame = frame; | 153 new JobRecord(bitstream_buffer, video_frame)); |
180 job_record->ready_cb = cb; | |
181 | 154 |
182 device_thread_.message_loop()->PostTask( | 155 device_thread_.message_loop()->PostTask( |
183 FROM_HERE, | 156 FROM_HERE, |
184 base::Bind(&V4L2ImageProcessor::ProcessTask, | 157 base::Bind(&V4L2JpegDecodeAccelerator::DecodeTask, |
185 base::Unretained(this), | 158 base::Unretained(this), |
186 base::Passed(&job_record))); | 159 base::Passed(&job_record))); |
187 } | 160 } |
188 | 161 |
189 void V4L2ImageProcessor::ProcessTask(scoped_ptr<JobRecord> job_record) { | 162 void V4L2JpegDecodeAccelerator::DecodeTask(scoped_ptr<JobRecord> job_record) { |
190 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); | 163 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); |
191 | |
192 input_queue_.push(make_linked_ptr(job_record.release())); | 164 input_queue_.push(make_linked_ptr(job_record.release())); |
165 if (!CheckBufferAttributes()) | |
166 return; | |
193 Enqueue(); | 167 Enqueue(); |
194 } | 168 } |
195 | 169 |
196 void V4L2ImageProcessor::Destroy() { | 170 void V4L2JpegDecodeAccelerator::Destroy() { |
197 DVLOG(3) << __func__; | 171 DVLOG(3) << __func__; |
198 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | 172 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
199 | 173 |
200 // If the device thread is running, destroy using posted task. | 174 // If the device thread is running, destroy using posted task. |
201 if (device_thread_.IsRunning()) { | 175 if (device_thread_.IsRunning()) { |
202 device_thread_.message_loop()->PostTask( | 176 device_thread_.message_loop()->PostTask( |
203 FROM_HERE, | 177 FROM_HERE, |
204 base::Bind(&V4L2ImageProcessor::DestroyTask, base::Unretained(this))); | 178 base::Bind(&V4L2JpegDecodeAccelerator::DestroyTask, |
179 base::Unretained(this))); | |
205 // Wait for tasks to finish/early-exit. | 180 // Wait for tasks to finish/early-exit. |
206 device_thread_.Stop(); | 181 device_thread_.Stop(); |
207 } else { | 182 } else { |
208 // Otherwise DestroyTask() is not needed. | 183 // Otherwise DestroyTask() is not needed. |
209 DCHECK(!device_poll_thread_.IsRunning()); | 184 DCHECK(!device_poll_thread_.IsRunning()); |
210 DCHECK(!device_weak_factory_.HasWeakPtrs()); | 185 DCHECK(!device_weak_factory_.HasWeakPtrs()); |
211 } | 186 } |
212 | |
213 delete this; | 187 delete this; |
214 } | 188 } |
215 | 189 |
216 void V4L2ImageProcessor::DestroyTask() { | 190 void V4L2JpegDecodeAccelerator::DestroyTask() { |
217 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); | 191 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); |
218 | 192 |
219 device_weak_factory_.InvalidateWeakPtrs(); | 193 device_weak_factory_.InvalidateWeakPtrs(); |
220 | 194 |
221 // Stop streaming and the device_poll_thread_. | 195 // Stop streaming and the device_poll_thread_. |
222 StopDevicePoll(); | 196 StopDevicePoll(); |
223 } | 197 } |
224 | 198 |
225 bool V4L2ImageProcessor::CreateInputBuffers() { | 199 bool V4L2JpegDecodeAccelerator::CheckBufferAttributes() { |
226 DVLOG(3) << __func__; | 200 DVLOG(3) << __func__; |
227 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | 201 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); |
202 DCHECK(!input_queue_.empty()); | |
203 linked_ptr<JobRecord> job_record = input_queue_.front(); | |
204 | |
205 // Check image resolution and format are the same as previous. | |
206 if (job_record->frame->format() != output_format_ || | |
207 job_record->frame->coded_size() != image_coded_size_) { | |
208 image_coded_size_ = job_record->frame->coded_size(); | |
209 output_format_ = job_record->frame->format(); | |
210 if (input_streamon_ || output_streamon_) { | |
211 // ResetBuffers(); | |
212 // input_queue_.push(job_record); | |
213 } else if (!CreateInputBuffers() || !CreateOutputBuffers()) { | |
214 return false; | |
215 } | |
216 } | |
217 return true; | |
218 } | |
219 | |
220 //void V4L2JpegDecodeAccelerator::ResetBuffers() { | |
221 // DVLOG(3) << __func__; | |
222 // if (!StopDevicePoll()) | |
223 // return; | |
224 //} | |
225 | |
226 bool V4L2JpegDecodeAccelerator::CreateInputBuffers() { | |
227 DVLOG(3) << __func__; | |
228 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); | |
228 DCHECK(!input_streamon_); | 229 DCHECK(!input_streamon_); |
229 | 230 |
230 struct v4l2_control control; | 231 DCHECK(!input_queue_.empty()); |
231 memset(&control, 0, sizeof(control)); | 232 linked_ptr<JobRecord> job_record = input_queue_.front(); |
232 control.id = V4L2_CID_ROTATE; | 233 size_t reserve_size = job_record->bitstream_buffer.size() * 2; |
233 control.value = 0; | |
234 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CTRL, &control); | |
235 | |
236 memset(&control, 0, sizeof(control)); | |
237 control.id = V4L2_CID_HFLIP; | |
238 control.value = 0; | |
239 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CTRL, &control); | |
240 | |
241 memset(&control, 0, sizeof(control)); | |
242 control.id = V4L2_CID_VFLIP; | |
243 control.value = 0; | |
244 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CTRL, &control); | |
245 | |
246 memset(&control, 0, sizeof(control)); | |
247 control.id = V4L2_CID_ALPHA_COMPONENT; | |
248 control.value = 255; | |
249 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CTRL, &control); | |
250 | 234 |
251 struct v4l2_format format; | 235 struct v4l2_format format; |
252 memset(&format, 0, sizeof(format)); | 236 memset(&format, 0, sizeof(format)); |
253 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | 237 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
254 format.fmt.pix_mp.width = input_visible_size_.width(); | 238 format.fmt.pix.width = job_record->frame->coded_size().width(); |
255 format.fmt.pix_mp.height = input_visible_size_.height(); | 239 format.fmt.pix.height = job_record->frame->coded_size().height(); |
256 format.fmt.pix_mp.pixelformat = input_format_fourcc_; | 240 format.fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG; |
257 format.fmt.pix_mp.num_planes = input_planes_count_; | 241 format.fmt.pix.sizeimage = reserve_size; |
258 for (size_t i = 0; i < input_planes_count_; ++i) { | 242 format.fmt.pix.field = V4L2_FIELD_ANY; |
259 format.fmt.pix_mp.plane_fmt[i].sizeimage = | 243 format.fmt.pix.bytesperline = 0; |
260 media::VideoFrame::PlaneAllocationSize( | |
261 input_format_, i, input_allocated_size_); | |
262 format.fmt.pix_mp.plane_fmt[i].bytesperline = | |
263 base::checked_cast<__u32>(input_allocated_size_.width()); | |
264 } | |
265 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); | 244 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); |
266 | 245 |
267 input_allocated_size_ = V4L2Device::CodedSizeFromV4L2Format(format); | |
268 DCHECK(gfx::Rect(input_allocated_size_).Contains( | |
269 gfx::Rect(input_visible_size_))); | |
270 | |
271 struct v4l2_crop crop; | |
272 memset(&crop, 0, sizeof(crop)); | |
273 crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
274 crop.c.left = 0; | |
275 crop.c.top = 0; | |
276 crop.c.width = base::checked_cast<__u32>(input_visible_size_.width()); | |
277 crop.c.height = base::checked_cast<__u32>(input_visible_size_.height()); | |
278 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CROP, &crop); | |
279 | |
280 struct v4l2_requestbuffers reqbufs; | 246 struct v4l2_requestbuffers reqbufs; |
281 memset(&reqbufs, 0, sizeof(reqbufs)); | 247 memset(&reqbufs, 0, sizeof(reqbufs)); |
282 reqbufs.count = kInputBufferCount; | 248 reqbufs.count = kInputBufferCount; |
283 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | 249 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
284 reqbufs.memory = V4L2_MEMORY_USERPTR; | 250 reqbufs.memory = V4L2_MEMORY_MMAP; |
285 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); | 251 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); |
286 | 252 |
287 DCHECK(input_buffer_map_.empty()); | 253 DCHECK(input_buffer_map_.empty()); |
288 input_buffer_map_.resize(reqbufs.count); | 254 input_buffer_map_.resize(reqbufs.count); |
289 | 255 |
290 for (size_t i = 0; i < input_buffer_map_.size(); ++i) | 256 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { |
291 free_input_buffers_.push_back(i); | 257 free_input_buffers_.push_back(i); |
292 | 258 |
259 struct v4l2_buffer buffer; | |
260 memset(&buffer, 0, sizeof(buffer)); | |
261 buffer.index = i; | |
262 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | |
263 buffer.memory = V4L2_MEMORY_MMAP; | |
264 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); | |
265 void* address = device_->Mmap(NULL, | |
266 buffer.length, | |
267 PROT_READ | PROT_WRITE, | |
268 MAP_SHARED, | |
269 buffer.m.offset); | |
270 if (address == MAP_FAILED) { | |
271 PLOG(ERROR) << "CreateInputBuffers(): mmap() failed"; | |
272 return false; | |
273 } | |
274 input_buffer_map_[i].address = address; | |
275 input_buffer_map_[i].length = buffer.length; | |
276 } | |
277 | |
293 return true; | 278 return true; |
294 } | 279 } |
295 | 280 |
296 bool V4L2ImageProcessor::CreateOutputBuffers() { | 281 bool V4L2JpegDecodeAccelerator::CreateOutputBuffers() { |
297 DVLOG(3) << __func__; | 282 DVLOG(3) << __func__; |
298 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | 283 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); |
299 DCHECK(!output_streamon_); | 284 DCHECK(!output_streamon_); |
300 | 285 |
286 DCHECK(!input_queue_.empty()); | |
287 linked_ptr<JobRecord> job_record = input_queue_.front(); | |
288 | |
289 size_t frame_size = media::VideoFrame::AllocationSize( | |
290 output_format_, job_record->frame->coded_size()); |
291 // TODO(henryhsu): not support YUV410M format | |
292 uint32 output_format_fourcc_ = V4L2_PIX_FMT_YUV420; | |
293 // V4L2Device::VideoFrameFormatToV4L2PixFmt(output_format_); | |
301 struct v4l2_format format; | 294 struct v4l2_format format; |
302 memset(&format, 0, sizeof(format)); | 295 memset(&format, 0, sizeof(format)); |
303 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | 296 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
304 format.fmt.pix_mp.width = output_allocated_size_.width(); | 297 format.fmt.pix.width = job_record->frame->coded_size().width(); |
305 format.fmt.pix_mp.height = output_allocated_size_.height(); | 298 format.fmt.pix.height = job_record->frame->coded_size().height(); |
306 format.fmt.pix_mp.pixelformat = output_format_fourcc_; | 299 format.fmt.pix.sizeimage = frame_size; |
307 format.fmt.pix_mp.num_planes = output_planes_count_; | 300 format.fmt.pix.pixelformat = output_format_fourcc_; |
308 for (size_t i = 0; i < output_planes_count_; ++i) { | 301 format.fmt.pix.field = V4L2_FIELD_ANY; |
309 format.fmt.pix_mp.plane_fmt[i].sizeimage = | |
310 media::VideoFrame::PlaneAllocationSize( | |
311 output_format_, i, output_allocated_size_); | |
312 format.fmt.pix_mp.plane_fmt[i].bytesperline = | |
313 base::checked_cast<__u32>(output_allocated_size_.width()); | |
314 } | |
315 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); | 302 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); |
316 | 303 |
317 gfx::Size adjusted_allocated_size = | |
318 V4L2Device::CodedSizeFromV4L2Format(format); | |
319 DCHECK(gfx::Rect(adjusted_allocated_size).Contains( | |
320 gfx::Rect(output_allocated_size_))); | |
321 output_allocated_size_ = adjusted_allocated_size; | |
322 | |
323 struct v4l2_crop crop; | |
324 memset(&crop, 0, sizeof(crop)); | |
325 crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
326 crop.c.left = 0; | |
327 crop.c.top = 0; | |
328 crop.c.width = base::checked_cast<__u32>(output_visible_size_.width()); | |
329 crop.c.height = base::checked_cast<__u32>(output_visible_size_.height()); | |
330 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CROP, &crop); | |
331 | |
332 struct v4l2_requestbuffers reqbufs; | 304 struct v4l2_requestbuffers reqbufs; |
333 memset(&reqbufs, 0, sizeof(reqbufs)); | 305 memset(&reqbufs, 0, sizeof(reqbufs)); |
334 reqbufs.count = kOutputBufferCount; | 306 reqbufs.count = kOutputBufferCount; |
335 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | 307 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
336 reqbufs.memory = V4L2_MEMORY_MMAP; | 308 reqbufs.memory = V4L2_MEMORY_MMAP; |
337 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); | 309 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); |
338 | 310 |
339 DCHECK(output_buffer_map_.empty()); | 311 DCHECK(output_buffer_map_.empty()); |
340 output_buffer_map_.resize(reqbufs.count); | 312 output_buffer_map_.resize(reqbufs.count); |
313 | |
341 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { | 314 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { |
342 OutputRecord& output_record = output_buffer_map_[i]; | 315 free_output_buffers_.push_back(i); |
343 output_record.fds.resize(output_planes_count_); | 316 |
344 for (size_t j = 0; j < output_planes_count_; ++j) { | 317 struct v4l2_buffer buffer; |
345 struct v4l2_exportbuffer expbuf; | 318 memset(&buffer, 0, sizeof(buffer)); |
346 memset(&expbuf, 0, sizeof(expbuf)); | 319 buffer.index = i; |
347 expbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | 320 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
348 expbuf.index = i; | 321 buffer.memory = V4L2_MEMORY_MMAP; |
349 expbuf.plane = j; | 322 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); |
350 expbuf.flags = O_CLOEXEC; | 323 void* address = device_->Mmap(NULL, |
351 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_EXPBUF, &expbuf); | 324 buffer.length, |
352 output_record.fds[j] = expbuf.fd; | 325 PROT_READ | PROT_WRITE, |
326 MAP_SHARED, | |
327 buffer.m.offset); | |
328 if (address == MAP_FAILED) { | |
329 PLOG(ERROR) << "CreateOutputBuffers(): mmap() failed"; | |
330 return false; | |
353 } | 331 } |
354 free_output_buffers_.push_back(i); | 332 output_buffer_map_[i].address = address; |
333 output_buffer_map_[i].length = buffer.length; | |
355 } | 334 } |
356 | 335 |
357 return true; | 336 return true; |
358 } | 337 } |
359 | 338 |
360 void V4L2ImageProcessor::DestroyInputBuffers() { | 339 void V4L2JpegDecodeAccelerator::DestroyInputBuffers() { |
361 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | 340 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
362 DCHECK(!input_streamon_); | 341 DCHECK(!input_streamon_); |
363 | 342 |
343 for (size_t buf = 0; buf < input_buffer_map_.size(); ++buf) { | |
344 InputRecord& input_record = input_buffer_map_[buf]; | |
345 device_->Munmap(input_record.address, input_record.length); | |
346 } | |
347 | |
364 struct v4l2_requestbuffers reqbufs; | 348 struct v4l2_requestbuffers reqbufs; |
365 memset(&reqbufs, 0, sizeof(reqbufs)); | 349 memset(&reqbufs, 0, sizeof(reqbufs)); |
366 reqbufs.count = 0; | 350 reqbufs.count = 0; |
367 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | 351 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
368 reqbufs.memory = V4L2_MEMORY_USERPTR; | 352 reqbufs.memory = V4L2_MEMORY_MMAP; |
369 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); | 353 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); |
370 | 354 |
371 input_buffer_map_.clear(); | 355 input_buffer_map_.clear(); |
372 free_input_buffers_.clear(); | 356 free_input_buffers_.clear(); |
373 } | 357 } |
374 | 358 |
375 void V4L2ImageProcessor::DestroyOutputBuffers() { | 359 void V4L2JpegDecodeAccelerator::DestroyOutputBuffers() { |
376 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | 360 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
377 DCHECK(!output_streamon_); | 361 DCHECK(!output_streamon_); |
378 | 362 |
379 for (size_t buf = 0; buf < output_buffer_map_.size(); ++buf) { | 363 for (size_t buf = 0; buf < output_buffer_map_.size(); ++buf) { |
380 OutputRecord& output_record = output_buffer_map_[buf]; | 364 OutputRecord& output_record = output_buffer_map_[buf]; |
381 for (size_t plane = 0; plane < output_record.fds.size(); ++plane) | 365 device_->Munmap(output_record.address, output_record.length); |
382 close(output_record.fds[plane]); | |
383 output_record.fds.clear(); | |
384 } | 366 } |
385 | 367 |
386 struct v4l2_requestbuffers reqbufs; | 368 struct v4l2_requestbuffers reqbufs; |
387 memset(&reqbufs, 0, sizeof(reqbufs)); | 369 memset(&reqbufs, 0, sizeof(reqbufs)); |
388 reqbufs.count = 0; | 370 reqbufs.count = 0; |
389 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | 371 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
390 reqbufs.memory = V4L2_MEMORY_MMAP; | 372 reqbufs.memory = V4L2_MEMORY_MMAP; |
391 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); | 373 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); |
392 | 374 |
393 output_buffer_map_.clear(); | 375 output_buffer_map_.clear(); |
394 free_output_buffers_.clear(); | 376 free_output_buffers_.clear(); |
395 } | 377 } |
396 | 378 |
397 void V4L2ImageProcessor::DevicePollTask(bool poll_device) { | 379 void V4L2JpegDecodeAccelerator::DevicePollTask(bool poll_device) { |
398 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current()); | 380 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current()); |
399 | 381 |
400 bool event_pending; | 382 bool event_pending; |
401 if (!device_->Poll(poll_device, &event_pending)) { | 383 if (!device_->Poll(poll_device, &event_pending)) { |
402 NOTIFY_ERROR(); | 384 NotifyError(-1, media::JpegDecodeAccelerator::PLATFORM_FAILURE); |
403 return; | 385 return; |
404 } | 386 } |
405 | 387 |
406 // All processing should happen on ServiceDeviceTask(), since we shouldn't | 388 // All processing should happen on ServiceDeviceTask(), since we shouldn't |
407 // touch encoder state from this thread. | 389 // touch encoder state from this thread. |
408 device_thread_.message_loop()->PostTask( | 390 device_thread_.message_loop()->PostTask( |
409 FROM_HERE, | 391 FROM_HERE, |
410 base::Bind(&V4L2ImageProcessor::ServiceDeviceTask, | 392 base::Bind(&V4L2JpegDecodeAccelerator::ServiceDeviceTask, |
411 base::Unretained(this))); | 393 base::Unretained(this))); |
412 } | 394 } |
413 | 395 |
414 void V4L2ImageProcessor::ServiceDeviceTask() { | 396 void V4L2JpegDecodeAccelerator::ServiceDeviceTask() { |
415 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); | 397 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); |
416 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(), | 398 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(), |
417 // so either: | 399 // so either: |
418 // * device_poll_thread_ is running normally | 400 // * device_poll_thread_ is running normally |
419 // * device_poll_thread_ scheduled us, but then a DestroyTask() shut it down, | 401 // * device_poll_thread_ scheduled us, but then a DestroyTask() shut it down, |
420 // in which case we should early-out. | 402 // in which case we should early-out. |
421 if (!device_poll_thread_.message_loop()) | 403 if (!device_poll_thread_.message_loop()) |
422 return; | 404 return; |
423 | 405 |
424 Dequeue(); | 406 Dequeue(); |
425 Enqueue(); | 407 Enqueue(); |
426 | 408 |
427 if (!device_->ClearDevicePollInterrupt()) | 409 if (!device_->ClearDevicePollInterrupt()) { |
428 return; | 410 return; |
411 } | |
429 | 412 |
430 bool poll_device = | 413 bool poll_device = |
431 (input_buffer_queued_count_ > 0 && output_buffer_queued_count_ > 0); | 414 (input_buffer_queued_count_ > 0 && output_buffer_queued_count_ > 0); |
432 | 415 |
433 device_poll_thread_.message_loop()->PostTask( | 416 device_poll_thread_.message_loop()->PostTask( |
434 FROM_HERE, | 417 FROM_HERE, |
435 base::Bind(&V4L2ImageProcessor::DevicePollTask, | 418 base::Bind(&V4L2JpegDecodeAccelerator::DevicePollTask, |
436 base::Unretained(this), | 419 base::Unretained(this), |
437 poll_device)); | 420 poll_device)); |
438 | 421 |
439 DVLOG(2) << __func__ << ": buffer counts: INPUT[" | 422 DVLOG(2) << __func__ << ": buffer counts: INPUT[" |
440 << input_queue_.size() << "] => DEVICE[" | 423 << input_queue_.size() << "] => DEVICE[" |
441 << free_input_buffers_.size() << "+" | 424 << free_input_buffers_.size() << "+" |
442 << input_buffer_queued_count_ << "/" | 425 << input_buffer_queued_count_ << "/" |
443 << input_buffer_map_.size() << "->" | 426 << input_buffer_map_.size() << "->" |
444 << free_output_buffers_.size() << "+" | 427 << free_output_buffers_.size() << "+" |
445 << output_buffer_queued_count_ << "/" | 428 << output_buffer_queued_count_ << "/" |
446 << output_buffer_map_.size() << "] => CLIENT[" | 429 << output_buffer_map_.size() << "] => CLIENT[" |
447 << output_buffer_map_.size() - output_buffer_queued_count_ - | 430 << output_buffer_map_.size() - output_buffer_queued_count_ - |
448 free_output_buffers_.size() << "]"; | 431 free_output_buffers_.size() << "]"; |
449 } | 432 } |
450 | 433 |
451 void V4L2ImageProcessor::Enqueue() { | 434 void V4L2JpegDecodeAccelerator::Enqueue() { |
452 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); | 435 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); |
453 | 436 |
454 const int old_inputs_queued = input_buffer_queued_count_; | 437 const int old_inputs_queued = input_buffer_queued_count_; |
455 while (!input_queue_.empty() && !free_input_buffers_.empty()) { | 438 while (!input_queue_.empty() && !free_input_buffers_.empty()) { |
456 if (!EnqueueInputRecord()) | 439 if (!EnqueueInputRecord()) |
457 return; | 440 return; |
458 } | 441 } |
459 if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) { | 442 if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) { |
460 // We started up a previously empty queue. | 443 // We started up a previously empty queue. |
461 // Queue state changed; signal interrupt. | 444 // Queue state changed; signal interrupt. |
462 if (!device_->SetDevicePollInterrupt()) | 445 if (!device_->SetDevicePollInterrupt()) { |
kcwu 2015/05/26 10:47:23: Why do we need to SetDevicePollInterrupt? Assume t
henryhsu 2015/06/05 03:28:55: As discussed before, I removed this to save one tr
463 return; | 446 return; |
447 } | |
464 // VIDIOC_STREAMON if we haven't yet. | 448 // VIDIOC_STREAMON if we haven't yet. |
465 if (!input_streamon_) { | 449 if (!input_streamon_) { |
466 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | 450 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
467 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); | 451 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); |
468 input_streamon_ = true; | 452 input_streamon_ = true; |
469 } | 453 } |
470 } | 454 } |
471 | 455 |
472 // TODO(posciak): Fix this to be non-Exynos specific. | 456 // TODO(posciak): Fix this to be non-Exynos specific. |
wuchengli 2015/05/25 10:29:23: This is a new driver. We have a chance not to have
Pawel Osciak 2015/05/25 11:05:01: As far as I know this only applies to GSC, I don't
henryhsu 2015/06/05 03:28:55: Done.
473 // Exynos GSC is liable to race conditions if more than one output buffer is | 457 // Exynos GSC is liable to race conditions if more than one output buffer is |
474 // simultaneously enqueued, so enqueue just one. | 458 // simultaneously enqueued, so enqueue just one. |
475 if (output_buffer_queued_count_ == 0 && !free_output_buffers_.empty()) { | 459 if (output_buffer_queued_count_ == 0 && !free_output_buffers_.empty()) { |
476 const int old_outputs_queued = output_buffer_queued_count_; | 460 const int old_outputs_queued = output_buffer_queued_count_; |
477 if (!EnqueueOutputRecord()) | 461 if (!EnqueueOutputRecord()) |
478 return; | 462 return; |
479 if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) { | 463 if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) { |
480 // We just started up a previously empty queue. | 464 // We just started up a previously empty queue. |
481 // Queue state changed; signal interrupt. | 465 // Queue state changed; signal interrupt. |
482 if (!device_->SetDevicePollInterrupt()) | 466 if (!device_->SetDevicePollInterrupt()) { |
483 return; | 467 return; |
468 } | |
484 // Start VIDIOC_STREAMON if we haven't yet. | 469 // Start VIDIOC_STREAMON if we haven't yet. |
485 if (!output_streamon_) { | 470 if (!output_streamon_) { |
486 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | 471 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
487 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); | 472 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); |
488 output_streamon_ = true; | 473 output_streamon_ = true; |
489 } | 474 } |
490 } | 475 } |
491 } | 476 } |
492 DCHECK_LE(output_buffer_queued_count_, 1); | 477 DCHECK_LE(output_buffer_queued_count_, 1); |
493 } | 478 } |
494 | 479 |
495 void V4L2ImageProcessor::Dequeue() { | 480 void V4L2JpegDecodeAccelerator::Dequeue() { |
496 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); | 481 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); |
497 | 482 |
498 // Dequeue completed input (VIDEO_OUTPUT) buffers, | 483 // Dequeue completed input (VIDEO_OUTPUT) buffers, |
499 // and recycle to the free list. | 484 // and recycle to the free list. |
500 struct v4l2_buffer dqbuf; | 485 struct v4l2_buffer dqbuf; |
501 struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
502 while (input_buffer_queued_count_ > 0) { | 486 while (input_buffer_queued_count_ > 0) { |
503 DCHECK(input_streamon_); | 487 DCHECK(input_streamon_); |
504 memset(&dqbuf, 0, sizeof(dqbuf)); | 488 memset(&dqbuf, 0, sizeof(dqbuf)); |
505 memset(&planes, 0, sizeof(planes)); | 489 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
506 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | 490 dqbuf.memory = V4L2_MEMORY_MMAP; |
507 dqbuf.memory = V4L2_MEMORY_USERPTR; | |
508 dqbuf.m.planes = planes; | |
509 dqbuf.length = input_planes_count_; | |
510 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { | 491 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { |
511 if (errno == EAGAIN) { | 492 if (errno == EAGAIN) { |
512 // EAGAIN if we're just out of buffers to dequeue. | 493 // EAGAIN if we're just out of buffers to dequeue. |
513 break; | 494 break; |
514 } | 495 } |
515 PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF"; | 496 PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF"; |
516 NOTIFY_ERROR(); | 497 NotifyError(dqbuf.index, media::JpegDecodeAccelerator::PLATFORM_FAILURE); |
517 return; | 498 return; |
518 } | 499 } |
519 InputRecord& input_record = input_buffer_map_[dqbuf.index]; | 500 InputRecord& input_record = input_buffer_map_[dqbuf.index]; |
520 DCHECK(input_record.at_device); | 501 DCHECK(input_record.at_device); |
521 input_record.at_device = false; | 502 input_record.at_device = false; |
522 input_record.frame = NULL; | |
523 free_input_buffers_.push_back(dqbuf.index); | 503 free_input_buffers_.push_back(dqbuf.index); |
524 input_buffer_queued_count_--; | 504 input_buffer_queued_count_--; |
525 } | 505 } |
526 | 506 |
527 // Dequeue completed output (VIDEO_CAPTURE) buffers, recycle to the free list. | 507 // Dequeue completed output (VIDEO_CAPTURE) buffers, recycle to the free list. |
528 // Return the finished buffer to the client via the job ready callback. | 508 // Return the finished buffer to the client via the job ready callback. |
529 while (output_buffer_queued_count_ > 0) { | 509 while (output_buffer_queued_count_ > 0) { |
530 DCHECK(output_streamon_); | 510 DCHECK(output_streamon_); |
531 memset(&dqbuf, 0, sizeof(dqbuf)); | 511 memset(&dqbuf, 0, sizeof(dqbuf)); |
532 memset(&planes, 0, sizeof(planes)); | 512 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
533 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
534 dqbuf.memory = V4L2_MEMORY_MMAP; | 513 dqbuf.memory = V4L2_MEMORY_MMAP; |
535 dqbuf.m.planes = planes; | |
536 dqbuf.length = output_planes_count_; | |
537 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { | 514 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { |
538 if (errno == EAGAIN) { | 515 if (errno == EAGAIN) { |
539 // EAGAIN if we're just out of buffers to dequeue. | 516 // EAGAIN if we're just out of buffers to dequeue. |
540 break; | 517 break; |
541 } | 518 } |
542 PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF"; | 519 PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF"; |
543 NOTIFY_ERROR(); | 520 NotifyError(dqbuf.index, media::JpegDecodeAccelerator::PLATFORM_FAILURE); |
544 return; | 521 return; |
545 } | 522 } |
546 OutputRecord& output_record = output_buffer_map_[dqbuf.index]; | 523 OutputRecord& output_record = output_buffer_map_[dqbuf.index]; |
547 DCHECK(output_record.at_device); | 524 DCHECK(output_record.at_device); |
548 output_record.at_device = false; | 525 output_record.at_device = false; |
549 output_record.at_client = true; | 526 free_output_buffers_.push_back(dqbuf.index); |
550 output_buffer_queued_count_--; | 527 output_buffer_queued_count_--; |
551 | 528 |
552 // Jobs are always processed in FIFO order. | 529 // Jobs are always processed in FIFO order. |
553 DCHECK(!running_jobs_.empty()); | 530 DCHECK(!running_jobs_.empty()); |
554 linked_ptr<JobRecord> job_record = running_jobs_.front(); | 531 linked_ptr<JobRecord> job_record = running_jobs_.front(); |
555 running_jobs_.pop(); | 532 running_jobs_.pop(); |
556 | 533 |
557 scoped_refptr<media::VideoFrame> output_frame = | 534 memcpy(job_record->frame->data(media::VideoFrame::kYPlane), |
558 media::VideoFrame::WrapExternalDmabufs( | 535 output_record.address, |
559 output_format_, | 536 output_record.length); |
560 output_allocated_size_, | |
561 gfx::Rect(output_visible_size_), | |
562 output_visible_size_, | |
563 output_record.fds, | |
564 job_record->frame->timestamp(), | |
565 media::BindToCurrentLoop( | |
566 base::Bind(&V4L2ImageProcessor::ReuseOutputBuffer, | |
567 device_weak_factory_.GetWeakPtr(), | |
568 dqbuf.index))); | |
569 | 537 |
570 DVLOG(3) << "Processing finished, returning frame, ts=" | 538 DVLOG(3) << "Processing finished, returning frame, ts=" |
571 << output_frame->timestamp().InMilliseconds(); | 539 << job_record->frame->timestamp().InMilliseconds(); |
572 | 540 |
573 child_message_loop_proxy_->PostTask( | 541 if (client_) |
574 FROM_HERE, base::Bind(job_record->ready_cb, output_frame)); | 542 client_->VideoFrameReady(job_record->bitstream_buffer.id()); |
575 } | 543 } |
576 } | 544 } |
577 | 545 |
578 void V4L2ImageProcessor::ReuseOutputBuffer(int index) { | 546 bool V4L2JpegDecodeAccelerator::EnqueueInputRecord() { |
579 DVLOG(3) << "Reusing output buffer, index=" << index; | |
580 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); | |
581 | |
582 OutputRecord& output_record = output_buffer_map_[index]; | |
583 DCHECK(output_record.at_client); | |
584 DCHECK(!output_record.at_device); | |
585 output_record.at_client = false; | |
586 free_output_buffers_.push_back(index); | |
587 | |
588 Enqueue(); | |
589 } | |
590 | |
591 bool V4L2ImageProcessor::EnqueueInputRecord() { | |
592 DCHECK(!input_queue_.empty()); | 547 DCHECK(!input_queue_.empty()); |
593 DCHECK(!free_input_buffers_.empty()); | 548 DCHECK(!free_input_buffers_.empty()); |
594 | 549 |
595 // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame. | 550 // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame. |
596 linked_ptr<JobRecord> job_record = input_queue_.front(); | 551 linked_ptr<JobRecord> job_record = input_queue_.front(); |
597 input_queue_.pop(); | 552 input_queue_.pop(); |
598 const int index = free_input_buffers_.back(); | 553 const int index = free_input_buffers_.back(); |
599 InputRecord& input_record = input_buffer_map_[index]; | 554 InputRecord& input_record = input_buffer_map_[index]; |
600 DCHECK(!input_record.at_device); | 555 DCHECK(!input_record.at_device); |
601 input_record.frame = job_record->frame; | 556 |
557 scoped_ptr<base::SharedMemory> shm( | |
558 new base::SharedMemory(job_record->bitstream_buffer.handle(), true)); | |
559 if (!shm->Map(job_record->bitstream_buffer.size())) { | |
560 LOG(ERROR) << "Decode(): could not map bitstream_buffer"; | |
561 NotifyError(job_record->bitstream_buffer.id(), UNREADABLE_INPUT); | |
562 return false; | |
563 } | |
602 struct v4l2_buffer qbuf; | 564 struct v4l2_buffer qbuf; |
603 struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES]; | |
604 memset(&qbuf, 0, sizeof(qbuf)); | 565 memset(&qbuf, 0, sizeof(qbuf)); |
605 memset(qbuf_planes, 0, sizeof(qbuf_planes)); | 566 memcpy(input_record.address, shm->memory(), input_record.length); |
606 qbuf.index = index; | 567 qbuf.index = index; |
607 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | 568 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
608 qbuf.memory = V4L2_MEMORY_USERPTR; | 569 qbuf.memory = V4L2_MEMORY_MMAP; |
609 qbuf.m.planes = qbuf_planes; | |
610 qbuf.length = input_planes_count_; | |
611 for (size_t i = 0; i < input_planes_count_; ++i) { | |
612 qbuf.m.planes[i].bytesused = media::VideoFrame::PlaneAllocationSize( | |
613 input_record.frame->format(), i, input_allocated_size_); | |
614 qbuf.m.planes[i].length = qbuf.m.planes[i].bytesused; | |
615 qbuf.m.planes[i].m.userptr = | |
616 reinterpret_cast<unsigned long>(input_record.frame->data(i)); | |
617 } | |
618 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); | 570 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); |
619 input_record.at_device = true; | 571 input_record.at_device = true; |
620 running_jobs_.push(job_record); | 572 running_jobs_.push(job_record); |
621 free_input_buffers_.pop_back(); | 573 free_input_buffers_.pop_back(); |
622 input_buffer_queued_count_++; | 574 input_buffer_queued_count_++; |
623 | 575 |
624 DVLOG(3) << __func__ << ": enqueued frame ts=" | 576 DVLOG(3) << __func__ << ": enqueued frame ts=" |
625 << job_record->frame->timestamp().InMilliseconds() << " to device."; | 577 << job_record->frame->timestamp().InMilliseconds() << " to device."; |
626 | 578 |
627 return true; | 579 return true; |
628 } | 580 } |
629 | 581 |
630 bool V4L2ImageProcessor::EnqueueOutputRecord() { | 582 bool V4L2JpegDecodeAccelerator::EnqueueOutputRecord() { |
631 DCHECK(!free_output_buffers_.empty()); | 583 DCHECK(!free_output_buffers_.empty()); |
632 | 584 |
633 // Enqueue an output (VIDEO_CAPTURE) buffer. | 585 // Enqueue an output (VIDEO_CAPTURE) buffer. |
634 const int index = free_output_buffers_.back(); | 586 const int index = free_output_buffers_.back(); |
635 OutputRecord& output_record = output_buffer_map_[index]; | 587 OutputRecord& output_record = output_buffer_map_[index]; |
636 DCHECK(!output_record.at_device); | 588 DCHECK(!output_record.at_device); |
637 struct v4l2_buffer qbuf; | 589 struct v4l2_buffer qbuf; |
638 struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES]; | |
639 memset(&qbuf, 0, sizeof(qbuf)); | 590 memset(&qbuf, 0, sizeof(qbuf)); |
640 memset(qbuf_planes, 0, sizeof(qbuf_planes)); | |
641 qbuf.index = index; | 591 qbuf.index = index; |
642 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | 592 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
643 qbuf.memory = V4L2_MEMORY_MMAP; | 593 qbuf.memory = V4L2_MEMORY_MMAP; |
644 qbuf.m.planes = qbuf_planes; | |
645 qbuf.length = output_planes_count_; | |
646 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); | 594 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); |
647 output_record.at_device = true; | 595 output_record.at_device = true; |
648 free_output_buffers_.pop_back(); | 596 free_output_buffers_.pop_back(); |
649 output_buffer_queued_count_++; | 597 output_buffer_queued_count_++; |
650 return true; | 598 return true; |
651 } | 599 } |
652 | 600 |
653 bool V4L2ImageProcessor::StartDevicePoll() { | 601 bool V4L2JpegDecodeAccelerator::StartDevicePoll() { |
654 DVLOG(3) << __func__ << ": starting device poll"; | 602 DVLOG(3) << __func__ << ": starting device poll"; |
655 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); | 603 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); |
656 DCHECK(!device_poll_thread_.IsRunning()); | 604 DCHECK(!device_poll_thread_.IsRunning()); |
657 | 605 |
658 // Start up the device poll thread and schedule its first DevicePollTask(). | 606 // Start up the device poll thread and schedule its first DevicePollTask(). |
659 if (!device_poll_thread_.Start()) { | 607 if (!device_poll_thread_.Start()) { |
660 LOG(ERROR) << "StartDevicePoll(): Device thread failed to start"; | 608 LOG(ERROR) << "StartDevicePoll(): Device thread failed to start"; |
661 NOTIFY_ERROR(); | 609 NotifyError(-1, media::JpegDecodeAccelerator::PLATFORM_FAILURE); |
662 return false; | 610 return false; |
663 } | 611 } |
664 // Enqueue a poll task with no devices to poll on - will wait only for the | 612 // Enqueue a poll task with no devices to poll on - will wait only for the |
665 // poll interrupt | 613 // poll interrupt |
666 device_poll_thread_.message_loop()->PostTask( | 614 device_poll_thread_.message_loop()->PostTask( |
667 FROM_HERE, | 615 FROM_HERE, |
668 base::Bind( | 616 base::Bind( |
669 &V4L2ImageProcessor::DevicePollTask, base::Unretained(this), false)); | 617 &V4L2JpegDecodeAccelerator::DevicePollTask, |
618 base::Unretained(this), false)); | |
670 | 619 |
671 return true; | 620 return true; |
672 } | 621 } |
673 | 622 |
674 bool V4L2ImageProcessor::StopDevicePoll() { | 623 bool V4L2JpegDecodeAccelerator::StopDevicePoll() { |
675 DVLOG(3) << __func__ << ": stopping device poll"; | 624 DVLOG(3) << __func__ << ": stopping device poll"; |
676 if (device_thread_.IsRunning()) | 625 if (device_thread_.IsRunning()) |
677 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); | 626 DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current()); |
678 | 627 |
679 // Signal the DevicePollTask() to stop, and stop the device poll thread. | 628 // Signal the DevicePollTask() to stop, and stop the device poll thread. |
680 if (!device_->SetDevicePollInterrupt()) | 629 if (!device_->SetDevicePollInterrupt()) { |
681 return false; | 630 return false; |
631 } | |
682 device_poll_thread_.Stop(); | 632 device_poll_thread_.Stop(); |
683 | 633 |
684 // Clear the interrupt now, to be sure. | 634 // Clear the interrupt now, to be sure. |
685 if (!device_->ClearDevicePollInterrupt()) | 635 if (!device_->ClearDevicePollInterrupt()) { |
686 return false; | 636 return false; |
637 } | |
687 | 638 |
688 if (input_streamon_) { | 639 if (input_streamon_) { |
689 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | 640 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
690 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); | 641 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); |
691 } | 642 } |
692 input_streamon_ = false; | 643 input_streamon_ = false; |
693 | 644 |
694 if (output_streamon_) { | 645 if (output_streamon_) { |
695 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | 646 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
696 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); | 647 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); |
697 } | 648 } |
698 output_streamon_ = false; | 649 output_streamon_ = false; |
699 | 650 |
700 // Reset all our accounting info. | 651 // Reset all our accounting info. |
701 while (!input_queue_.empty()) | 652 while (!input_queue_.empty()) |
702 input_queue_.pop(); | 653 input_queue_.pop(); |
703 | 654 |
704 while (!running_jobs_.empty()) | 655 while (!running_jobs_.empty()) |
705 running_jobs_.pop(); | 656 running_jobs_.pop(); |
706 | 657 |
707 free_input_buffers_.clear(); | 658 free_input_buffers_.clear(); |
708 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { | 659 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { |
709 InputRecord& input_record = input_buffer_map_[i]; | 660 InputRecord& input_record = input_buffer_map_[i]; |
710 input_record.at_device = false; | 661 input_record.at_device = false; |
711 input_record.frame = NULL; | |
712 free_input_buffers_.push_back(i); | 662 free_input_buffers_.push_back(i); |
713 } | 663 } |
714 input_buffer_queued_count_ = 0; | 664 input_buffer_queued_count_ = 0; |
715 | 665 |
716 free_output_buffers_.clear(); | 666 free_output_buffers_.clear(); |
717 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { | 667 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { |
718 OutputRecord& output_record = output_buffer_map_[i]; | 668 OutputRecord& output_record = output_buffer_map_[i]; |
719 output_record.at_device = false; | 669 output_record.at_device = false; |
720 if (!output_record.at_client) | 670 free_output_buffers_.push_back(i); |
721 free_output_buffers_.push_back(i); | |
722 } | 671 } |
723 output_buffer_queued_count_ = 0; | 672 output_buffer_queued_count_ = 0; |
724 | 673 |
725 return true; | 674 return true; |
726 } | 675 } |
727 | 676 |
728 } // namespace content | 677 } // namespace content |