| OLD | NEW |
| (Empty) |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "content/common/gpu/media/v4l2_jpeg_decode_accelerator.h" | |
| 6 | |
| 7 #include <errno.h> | |
| 8 #include <linux/videodev2.h> | |
| 9 #include <string.h> | |
| 10 #include <sys/mman.h> | |
| 11 | |
| 12 #include <memory> | |
| 13 | |
| 14 #include "base/big_endian.h" | |
| 15 #include "base/bind.h" | |
| 16 #include "base/thread_task_runner_handle.h" | |
| 17 #include "media/filters/jpeg_parser.h" | |
| 18 #include "third_party/libyuv/include/libyuv.h" | |
| 19 | |
// Runs |device_->Ioctl(type, arg)|; on failure logs the errno, posts
// PLATFORM_FAILURE to the client, and returns |value| from the enclosing
// function. |type_name| is the string used in the log message.
#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_name)      \
  do {                                                                \
    if (device_->Ioctl(type, arg) != 0) {                             \
      PLOG(ERROR) << __func__ << "(): ioctl() failed: " << type_name; \
      PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);   \
      return value;                                                   \
    }                                                                 \
  } while (0)

// Same as above for void functions: returns with no value on failure.
#define IOCTL_OR_ERROR_RETURN(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0), #type)

// Same as above for bool functions: returns false on failure.
#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type)

// Runs the ioctl and reports failure (log + PLATFORM_FAILURE notification)
// but does NOT return, so cleanup paths can keep going.
#define IOCTL_OR_LOG_ERROR(type, arg)                              \
  do {                                                             \
    if (device_->Ioctl(type, arg) != 0) {                          \
      PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type;  \
      PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); \
    }                                                              \
  } while (0)

// Reads one byte from |reader| into |out| (a uint8_t*); returns false from
// the enclosing function on unexpected end-of-stream.
#define READ_U8_OR_RETURN_FALSE(reader, out)                              \
  do {                                                                    \
    uint8_t _out;                                                         \
    if (!reader.ReadU8(&_out)) {                                          \
      DVLOG(1)                                                            \
          << "Error in stream: unexpected EOS while trying to read " #out; \
      return false;                                                       \
    }                                                                     \
    *(out) = _out;                                                        \
  } while (0)

// Reads one big-endian uint16 from |reader| into |out| (a uint16_t*);
// returns false from the enclosing function on unexpected end-of-stream.
#define READ_U16_OR_RETURN_FALSE(reader, out)                             \
  do {                                                                    \
    uint16_t _out;                                                        \
    if (!reader.ReadU16(&_out)) {                                         \
      DVLOG(1)                                                            \
          << "Error in stream: unexpected EOS while trying to read " #out; \
      return false;                                                       \
    }                                                                     \
    *(out) = _out;                                                        \
  } while (0)
| 64 | |
| 65 namespace content { | |
| 66 | |
// This is the default Huffman segment for 8-bit precision luminance and
// chrominance, built from the tables in JPEG standard section K.3. Strictly
// speaking there are no "default" tables in the standard, only "typical"
// ones, but they work well for many applications, and a lot of software
// (ffmpeg, for example) treats them as de facto standard tables.
// Layout: 0xFF 0xC4 is the DHT marker, 0x01A2 (418) is the segment length,
// followed by the four K.3 tables (DC/AC x luminance/chrominance).
const uint8_t kDefaultDhtSeg[] = {
    0xFF, 0xC4, 0x01, 0xA2, 0x00, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01,
    0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02,
    0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x01, 0x00, 0x03,
    0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09,
    0x0A, 0x0B, 0x10, 0x00, 0x02, 0x01, 0x03, 0x03, 0x02, 0x04, 0x03, 0x05,
    0x05, 0x04, 0x04, 0x00, 0x00, 0x01, 0x7D, 0x01, 0x02, 0x03, 0x00, 0x04,
    0x11, 0x05, 0x12, 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07, 0x22,
    0x71, 0x14, 0x32, 0x81, 0x91, 0xA1, 0x08, 0x23, 0x42, 0xB1, 0xC1, 0x15,
    0x52, 0xD1, 0xF0, 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0A, 0x16, 0x17,
    0x18, 0x19, 0x1A, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x34, 0x35, 0x36,
    0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A,
    0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66,
    0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A,
    0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94, 0x95,
    0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 0xA8,
    0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, 0xC2,
    0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4, 0xD5,
    0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7,
    0xE8, 0xE9, 0xEA, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9,
    0xFA, 0x11, 0x00, 0x02, 0x01, 0x02, 0x04, 0x04, 0x03, 0x04, 0x07, 0x05,
    0x04, 0x04, 0x00, 0x01, 0x02, 0x77, 0x00, 0x01, 0x02, 0x03, 0x11, 0x04,
    0x05, 0x21, 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71, 0x13, 0x22,
    0x32, 0x81, 0x08, 0x14, 0x42, 0x91, 0xA1, 0xB1, 0xC1, 0x09, 0x23, 0x33,
    0x52, 0xF0, 0x15, 0x62, 0x72, 0xD1, 0x0A, 0x16, 0x24, 0x34, 0xE1, 0x25,
    0xF1, 0x17, 0x18, 0x19, 0x1A, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x35, 0x36,
    0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A,
    0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66,
    0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A,
    0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94,
    0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7,
    0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA,
    0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4,
    0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7,
    0xE8, 0xE9, 0xEA, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA
};
| 109 | |
| 110 V4L2JpegDecodeAccelerator::BufferRecord::BufferRecord() | |
| 111 : address(nullptr), length(0), at_device(false) { | |
| 112 } | |
| 113 | |
// Unmapping is done explicitly in DestroyInputBuffers()/
// DestroyOutputBuffers(); nothing to release here.
V4L2JpegDecodeAccelerator::BufferRecord::~BufferRecord() {
}
| 116 | |
// Captures one decode request: the bitstream id for client callbacks, the
// shared memory holding the JPEG data (|true| = read-only mapping —
// presumably; TODO confirm against SharedMemoryRegion's ctor), and the
// destination frame.
V4L2JpegDecodeAccelerator::JobRecord::JobRecord(
    const media::BitstreamBuffer& bitstream_buffer,
    scoped_refptr<media::VideoFrame> video_frame)
    : bitstream_buffer_id(bitstream_buffer.id()),
      shm(bitstream_buffer, true),
      out_frame(video_frame) {}
| 123 | |
// |shm| and |out_frame| clean up via their own destructors.
V4L2JpegDecodeAccelerator::JobRecord::~JobRecord() {
}
| 126 | |
// Constructed on the child (owner) thread; |weak_ptr_| is created here so
// tasks posted back to |child_task_runner_| can be safely dropped after
// destruction. The decoder and poll threads are not started until
// Initialize().
V4L2JpegDecodeAccelerator::V4L2JpegDecodeAccelerator(
    const scoped_refptr<V4L2Device>& device,
    const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
    : output_buffer_pixelformat_(0),
      child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
      io_task_runner_(io_task_runner),
      client_(nullptr),
      device_(device),
      decoder_thread_("V4L2JpegDecodeThread"),
      device_poll_thread_("V4L2JpegDecodeDevicePollThread"),
      input_streamon_(false),
      output_streamon_(false),
      weak_factory_(this) {
  weak_ptr_ = weak_factory_.GetWeakPtr();
}
| 142 | |
// Tears down on the child thread: posts DestroyTask to the decoder thread
// (which stops streaming and frees buffers), blocks in Stop() until it has
// run, then invalidates weak pointers so no stale callbacks reach |client_|.
V4L2JpegDecodeAccelerator::~V4L2JpegDecodeAccelerator() {
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  if (decoder_thread_.IsRunning()) {
    decoder_task_runner_->PostTask(
        FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DestroyTask,
                              base::Unretained(this)));
    // Stop() joins the thread, so DestroyTask is guaranteed to have finished.
    decoder_thread_.Stop();
  }
  weak_factory_.InvalidateWeakPtrs();
  // DestroyTask must have stopped the poll thread by now.
  DCHECK(!device_poll_thread_.IsRunning());
}
| 155 | |
// Runs on the decoder thread: drops all queued and in-flight jobs, stops
// device polling/streaming, and releases the V4L2 buffers.
void V4L2JpegDecodeAccelerator::DestroyTask() {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  while (!input_jobs_.empty())
    input_jobs_.pop();
  while (!running_jobs_.empty())
    running_jobs_.pop();

  // Stop streaming and the device_poll_thread_.
  StopDevicePoll();

  DestroyInputBuffers();
  DestroyOutputBuffers();
}
| 169 | |
// Forwards a completed decode to the client. Always invoked on the child
// thread (posted via |weak_ptr_| from the decoder thread).
void V4L2JpegDecodeAccelerator::VideoFrameReady(int32_t bitstream_buffer_id) {
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  client_->VideoFrameReady(bitstream_buffer_id);
}
| 174 | |
// Reports |error| for |bitstream_buffer_id| to the client on the child
// thread. Use PostNotifyError() from any other thread.
void V4L2JpegDecodeAccelerator::NotifyError(int32_t bitstream_buffer_id,
                                            Error error) {
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  LOG(ERROR) << "Notifying of error " << error << " for buffer id "
             << bitstream_buffer_id;
  client_->NotifyError(bitstream_buffer_id, error);
}
| 182 | |
// Thread-safe error reporting: bounces NotifyError() to the child thread.
// The weak pointer drops the task if |this| has already been destroyed.
void V4L2JpegDecodeAccelerator::PostNotifyError(
    int32_t bitstream_buffer_id,
    Error error) {
  child_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&V4L2JpegDecodeAccelerator::NotifyError, weak_ptr_,
                 bitstream_buffer_id, error));
}
| 191 | |
// Verifies the device supports memory-to-memory streaming, subscribes to
// source-change events (used to detect output format/resolution), starts the
// decoder thread, and kicks off device polling. Returns false on any
// failure; must be called on the child thread.
bool V4L2JpegDecodeAccelerator::Initialize(Client* client) {
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  // Capabilities check.
  struct v4l2_capability caps;
  const __u32 kCapsRequired = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M;
  memset(&caps, 0, sizeof(caps));
  if (device_->Ioctl(VIDIOC_QUERYCAP, &caps) != 0) {
    PLOG(ERROR) << __func__ << ": ioctl() failed: VIDIOC_QUERYCAP";
    return false;
  }
  if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
    LOG(ERROR) << __func__ << ": VIDIOC_QUERYCAP, caps check failed: 0x"
               << std::hex << caps.capabilities;
    return false;
  }

  // Subscribe to the source change event, delivered when the driver has
  // parsed the JPEG header and knows the real output format/size.
  struct v4l2_event_subscription sub;
  memset(&sub, 0, sizeof(sub));
  sub.type = V4L2_EVENT_SOURCE_CHANGE;
  if (device_->Ioctl(VIDIOC_SUBSCRIBE_EVENT, &sub) != 0) {
    PLOG(ERROR) << __func__ << ": ioctl() failed: VIDIOC_SUBSCRIBE_EVENT";
    return false;
  }

  if (!decoder_thread_.Start()) {
    LOG(ERROR) << __func__ << ": decoder thread failed to start";
    return false;
  }
  client_ = client;
  decoder_task_runner_ = decoder_thread_.task_runner();

  // base::Unretained is safe: the destructor joins |decoder_thread_|.
  decoder_task_runner_->PostTask(
      FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::StartDevicePoll,
                            base::Unretained(this)));

  DVLOG(1) << "V4L2JpegDecodeAccelerator initialized.";
  return true;
}
| 232 | |
// Client entry point (IO thread): validates the request, wraps it in a
// JobRecord, and hands it to the decoder thread. Only I420 destination
// frames are accepted.
void V4L2JpegDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer,
    const scoped_refptr<media::VideoFrame>& video_frame) {
  DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
           << ", size=" << bitstream_buffer.size();
  DCHECK(io_task_runner_->BelongsToCurrentThread());

  if (bitstream_buffer.id() < 0) {
    LOG(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id();
    // We still own the shared memory handle here; close it so it doesn't
    // leak before rejecting the request.
    if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle()))
      base::SharedMemory::CloseHandle(bitstream_buffer.handle());
    PostNotifyError(bitstream_buffer.id(), INVALID_ARGUMENT);
    return;
  }

  if (video_frame->format() != media::PIXEL_FORMAT_I420) {
    PostNotifyError(bitstream_buffer.id(), UNSUPPORTED_JPEG);
    return;
  }

  std::unique_ptr<JobRecord> job_record(
      new JobRecord(bitstream_buffer, video_frame));

  // base::Unretained is safe: the destructor joins |decoder_thread_|.
  decoder_task_runner_->PostTask(
      FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DecodeTask,
                            base::Unretained(this), base::Passed(&job_record)));
}
| 260 | |
| 261 bool V4L2JpegDecodeAccelerator::IsSupported() { | |
| 262 v4l2_fmtdesc fmtdesc; | |
| 263 memset(&fmtdesc, 0, sizeof(fmtdesc)); | |
| 264 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | |
| 265 | |
| 266 for (; device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0; ++fmtdesc.index) { | |
| 267 if (fmtdesc.pixelformat == V4L2_PIX_FMT_JPEG) | |
| 268 return true; | |
| 269 } | |
| 270 return false; | |
| 271 } | |
| 272 | |
// Decoder-thread half of Decode(): maps the bitstream shared memory, queues
// the job, and services the device immediately (no pending event).
void V4L2JpegDecodeAccelerator::DecodeTask(
    std::unique_ptr<JobRecord> job_record) {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  if (!job_record->shm.Map()) {
    PLOG(ERROR) << __func__ << ": could not map bitstream_buffer";
    PostNotifyError(job_record->bitstream_buffer_id, UNREADABLE_INPUT);
    return;
  }
  input_jobs_.push(make_linked_ptr(job_record.release()));

  ServiceDeviceTask(false);
}
| 285 | |
| 286 size_t V4L2JpegDecodeAccelerator::InputBufferQueuedCount() { | |
| 287 return input_buffer_map_.size() - free_input_buffers_.size(); | |
| 288 } | |
| 289 | |
| 290 size_t V4L2JpegDecodeAccelerator::OutputBufferQueuedCount() { | |
| 291 return output_buffer_map_.size() - free_output_buffers_.size(); | |
| 292 } | |
| 293 | |
| 294 bool V4L2JpegDecodeAccelerator::ShouldRecreateInputBuffers() { | |
| 295 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | |
| 296 if (input_jobs_.empty()) | |
| 297 return false; | |
| 298 | |
| 299 linked_ptr<JobRecord> job_record = input_jobs_.front(); | |
| 300 // Check input buffer size is enough | |
| 301 return (input_buffer_map_.empty() || | |
| 302 (job_record->shm.size() + sizeof(kDefaultDhtSeg)) > | |
| 303 input_buffer_map_.front().length); | |
| 304 } | |
| 305 | |
| 306 bool V4L2JpegDecodeAccelerator::RecreateInputBuffers() { | |
| 307 DVLOG(3) << __func__; | |
| 308 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | |
| 309 | |
| 310 // If running queue is not empty, we should wait until pending frames finish. | |
| 311 if (!running_jobs_.empty()) | |
| 312 return true; | |
| 313 | |
| 314 DestroyInputBuffers(); | |
| 315 | |
| 316 if (!CreateInputBuffers()) { | |
| 317 LOG(ERROR) << "Create input buffers failed."; | |
| 318 return false; | |
| 319 } | |
| 320 | |
| 321 return true; | |
| 322 } | |
| 323 | |
| 324 bool V4L2JpegDecodeAccelerator::RecreateOutputBuffers() { | |
| 325 DVLOG(3) << __func__; | |
| 326 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | |
| 327 | |
| 328 DestroyOutputBuffers(); | |
| 329 | |
| 330 if (!CreateOutputBuffers()) { | |
| 331 LOG(ERROR) << "Create output buffers failed."; | |
| 332 return false; | |
| 333 } | |
| 334 | |
| 335 return true; | |
| 336 } | |
| 337 | |
// Negotiates the JPEG input format with the driver, requests kBufferCount
// MMAP buffers on the OUTPUT queue, and maps each one into this process.
// Buffer size is derived from the first pending job. Returns false (after
// posting PLATFORM_FAILURE) on any driver/mmap error.
bool V4L2JpegDecodeAccelerator::CreateInputBuffers() {
  DVLOG(3) << __func__;
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!input_jobs_.empty());
  linked_ptr<JobRecord> job_record = input_jobs_.front();
  // The input image may miss huffman table. We didn't parse the image before,
  // so we create more to avoid the situation of not enough memory.
  // Reserve twice size to avoid recreating input buffer frequently.
  size_t reserve_size = (job_record->shm.size() + sizeof(kDefaultDhtSeg)) * 2;
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
  format.fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG;
  format.fmt.pix.sizeimage = reserve_size;
  format.fmt.pix.field = V4L2_FIELD_ANY;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = kBufferCount;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  DCHECK(input_buffer_map_.empty());
  // The driver may grant fewer buffers than requested; trust reqbufs.count.
  input_buffer_map_.resize(reqbufs.count);

  for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
    free_input_buffers_.push_back(i);

    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    buffer.index = i;
    buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    buffer.memory = V4L2_MEMORY_MMAP;
    // QUERYBUF fills in the length/offset needed for the mmap below.
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
    void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
                                  MAP_SHARED, buffer.m.offset);
    if (address == MAP_FAILED) {
      PLOG(ERROR) << __func__ << ": mmap() failed";
      PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
      return false;
    }
    input_buffer_map_[i].address = address;
    input_buffer_map_[i].length = buffer.length;
  }

  return true;
}
| 388 | |
// Negotiates the decoded (CAPTURE) format against the front running job's
// destination frame, requests kBufferCount MMAP buffers, and maps them.
// Records the format/coded size the driver actually chose, which may differ
// from what we asked for. Returns false (after posting PLATFORM_FAILURE)
// on any driver/mmap error.
bool V4L2JpegDecodeAccelerator::CreateOutputBuffers() {
  DVLOG(3) << __func__;
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);
  DCHECK(!running_jobs_.empty());
  linked_ptr<JobRecord> job_record = running_jobs_.front();

  size_t frame_size = media::VideoFrame::AllocationSize(
      media::PIXEL_FORMAT_I420, job_record->out_frame->coded_size());
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  format.fmt.pix.width = job_record->out_frame->coded_size().width();
  format.fmt.pix.height = job_record->out_frame->coded_size().height();
  format.fmt.pix.sizeimage = frame_size;
  format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
  format.fmt.pix.field = V4L2_FIELD_ANY;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
  // S_FMT may adjust our request; keep what the driver settled on so
  // Dequeue() can convert/copy from the real format.
  output_buffer_pixelformat_ = format.fmt.pix.pixelformat;
  output_buffer_coded_size_.SetSize(format.fmt.pix.width,
                                    format.fmt.pix.height);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = kBufferCount;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  DCHECK(output_buffer_map_.empty());
  output_buffer_map_.resize(reqbufs.count);

  media::VideoPixelFormat output_format =
      V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_);

  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    free_output_buffers_.push_back(i);

    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    buffer.index = i;
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);

    // Sanity check: the driver's buffer must hold a full frame of the
    // negotiated format and size.
    DCHECK_GE(buffer.length,
              media::VideoFrame::AllocationSize(
                  output_format,
                  gfx::Size(format.fmt.pix.width, format.fmt.pix.height)));

    void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
                                  MAP_SHARED, buffer.m.offset);
    if (address == MAP_FAILED) {
      PLOG(ERROR) << __func__ << ": mmap() failed";
      PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
      return false;
    }
    output_buffer_map_[i].address = address;
    output_buffer_map_[i].length = buffer.length;
  }

  return true;
}
| 452 | |
| 453 void V4L2JpegDecodeAccelerator::DestroyInputBuffers() { | |
| 454 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | |
| 455 DCHECK(!input_streamon_); | |
| 456 | |
| 457 if (input_streamon_) { | |
| 458 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | |
| 459 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type); | |
| 460 input_streamon_ = false; | |
| 461 } | |
| 462 | |
| 463 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { | |
| 464 BufferRecord& input_record = input_buffer_map_[i]; | |
| 465 device_->Munmap(input_record.address, input_record.length); | |
| 466 } | |
| 467 | |
| 468 struct v4l2_requestbuffers reqbufs; | |
| 469 memset(&reqbufs, 0, sizeof(reqbufs)); | |
| 470 reqbufs.count = 0; | |
| 471 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | |
| 472 reqbufs.memory = V4L2_MEMORY_MMAP; | |
| 473 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); | |
| 474 | |
| 475 input_buffer_map_.clear(); | |
| 476 free_input_buffers_.clear(); | |
| 477 } | |
| 478 | |
| 479 void V4L2JpegDecodeAccelerator::DestroyOutputBuffers() { | |
| 480 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | |
| 481 DCHECK(!output_streamon_); | |
| 482 | |
| 483 if (output_streamon_) { | |
| 484 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | |
| 485 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type); | |
| 486 output_streamon_ = false; | |
| 487 } | |
| 488 | |
| 489 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { | |
| 490 BufferRecord& output_record = output_buffer_map_[i]; | |
| 491 device_->Munmap(output_record.address, output_record.length); | |
| 492 } | |
| 493 | |
| 494 struct v4l2_requestbuffers reqbufs; | |
| 495 memset(&reqbufs, 0, sizeof(reqbufs)); | |
| 496 reqbufs.count = 0; | |
| 497 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | |
| 498 reqbufs.memory = V4L2_MEMORY_MMAP; | |
| 499 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); | |
| 500 | |
| 501 output_buffer_map_.clear(); | |
| 502 free_output_buffers_.clear(); | |
| 503 } | |
| 504 | |
// Runs on the poll thread: blocks in Poll() until the device has work or an
// event, then bounces handling back to the decoder thread. No decoder state
// may be touched here.
void V4L2JpegDecodeAccelerator::DevicePollTask() {
  DCHECK(device_poll_task_runner_->BelongsToCurrentThread());

  bool event_pending;
  if (!device_->Poll(true, &event_pending)) {
    PLOG(ERROR) << __func__ << ": Poll device error.";
    PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
    return;
  }

  // All processing should happen on ServiceDeviceTask(), since we shouldn't
  // touch decoder state from this thread.
  decoder_task_runner_->PostTask(
      FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::ServiceDeviceTask,
                            base::Unretained(this), event_pending));
}
| 521 | |
// Pops one pending V4L2 event and returns true only if it is a source-change
// event carrying a resolution or pixel-format change (the signal that output
// buffers must be recreated). Any other outcome is a platform failure.
bool V4L2JpegDecodeAccelerator::DequeueSourceChangeEvent() {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());

  struct v4l2_event ev;
  memset(&ev, 0, sizeof(ev));

  if (device_->Ioctl(VIDIOC_DQEVENT, &ev) == 0) {
    if (ev.type == V4L2_EVENT_SOURCE_CHANGE) {
      DVLOG(3) << __func__ << ": got source change event: "
               << ev.u.src_change.changes;
      if (ev.u.src_change.changes &
          (V4L2_EVENT_SRC_CH_RESOLUTION | V4L2_EVENT_SRC_CH_PIXELFORMAT)) {
        return true;
      }
      LOG(ERROR) << __func__ << ": unexpected source change event.";
    } else {
      // We only subscribed to V4L2_EVENT_SOURCE_CHANGE in Initialize().
      LOG(ERROR) << __func__ << ": got an event (" << ev.type
                 << ") we haven't subscribed to.";
    }
  } else {
    LOG(ERROR) << __func__ << ": dequeue event failed.";
  }
  // All non-success paths funnel here: report and fail.
  PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
  return false;
}
| 547 | |
// Central decoder-thread state machine step: drains completed buffers,
// (re)creates buffers when needed, enqueues pending work, and re-arms the
// poll thread while jobs are in flight. |event_pending| is true when the
// poll thread saw a V4L2 event (expected: source change).
void V4L2JpegDecodeAccelerator::ServiceDeviceTask(bool event_pending) {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  // If DestroyTask() shuts |device_poll_thread_| down, we should early-out.
  if (!device_poll_thread_.IsRunning())
    return;

  if (!running_jobs_.empty())
    Dequeue();

  // RecreateInputBuffers() returning false is fatal; error already posted.
  if (ShouldRecreateInputBuffers() && !RecreateInputBuffers())
    return;

  if (event_pending) {
    // Source change: the driver now knows the decoded size/format, so the
    // output buffers must be (re)allocated to match.
    if (!DequeueSourceChangeEvent()) return;
    if (!RecreateOutputBuffers()) return;
  }

  EnqueueInput();
  EnqueueOutput();

  // Keep polling only while the device holds work; otherwise the loop idles
  // until DecodeTask() calls us again.
  if (!running_jobs_.empty()) {
    device_poll_task_runner_->PostTask(
        FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DevicePollTask,
                              base::Unretained(this)));
  }

  DVLOG(2) << __func__ << ": buffer counts: INPUT["
           << input_jobs_.size() << "] => DEVICE["
           << free_input_buffers_.size() << "/"
           << input_buffer_map_.size() << "->"
           << free_output_buffers_.size() << "/"
           << output_buffer_map_.size() << "]";
}
| 581 | |
// Moves queued jobs into free input buffers and QBUFs them, then starts
// input streaming once at least one buffer is queued.
void V4L2JpegDecodeAccelerator::EnqueueInput() {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  while (!input_jobs_.empty() && !free_input_buffers_.empty()) {
    // If input buffers are required to re-create, do not enqueue input record
    // until all pending frames are handled by device.
    if (ShouldRecreateInputBuffers())
      break;
    if (!EnqueueInputRecord())
      return;
  }
  // Check here because we cannot STREAMON before QBUF in earlier kernel.
  // (kernel version < 3.14)
  if (!input_streamon_ && InputBufferQueuedCount()) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
    input_streamon_ = true;
  }
}
| 600 | |
// QBUFs free output buffers — at most one per in-flight job — then starts
// output streaming once at least one buffer is queued.
void V4L2JpegDecodeAccelerator::EnqueueOutput() {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  // Output record can be enqueued because the output coded sizes of the frames
  // currently in the pipeline are all the same.
  while (running_jobs_.size() > OutputBufferQueuedCount() &&
         !free_output_buffers_.empty()) {
    if (!EnqueueOutputRecord())
      return;
  }
  // Check here because we cannot STREAMON before QBUF in earlier kernel.
  // (kernel version < 3.14)
  if (!output_streamon_ && OutputBufferQueuedCount()) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
    output_streamon_ = true;
  }
}
| 618 | |
// Copies/converts a decoded frame at |src_addr| (in driver format
// |src_pixelformat| with coded size |src_coded_size|) into the client's I420
// |dst_frame|. Returns false if libyuv rejects the conversion
// (ConvertToI420() returns 0 on success, non-zero on failure).
static bool CopyOutputImage(const uint32_t src_pixelformat,
                            const void* src_addr,
                            const gfx::Size& src_coded_size,
                            const scoped_refptr<media::VideoFrame>& dst_frame) {
  media::VideoPixelFormat format =
      V4L2Device::V4L2PixFmtToVideoPixelFormat(src_pixelformat);
  size_t src_size = media::VideoFrame::AllocationSize(format, src_coded_size);
  uint8_t* dst_y = dst_frame->data(media::VideoFrame::kYPlane);
  uint8_t* dst_u = dst_frame->data(media::VideoFrame::kUPlane);
  uint8_t* dst_v = dst_frame->data(media::VideoFrame::kVPlane);
  size_t dst_y_stride = dst_frame->stride(media::VideoFrame::kYPlane);
  size_t dst_u_stride = dst_frame->stride(media::VideoFrame::kUPlane);
  size_t dst_v_stride = dst_frame->stride(media::VideoFrame::kVPlane);

  // If the source format is I420, ConvertToI420 will simply copy the frame.
  // const_cast is needed because libyuv takes a non-const sample pointer
  // even though it only reads from it.
  if (libyuv::ConvertToI420(static_cast<uint8_t*>(const_cast<void*>(src_addr)),
                            src_size,
                            dst_y, dst_y_stride,
                            dst_u, dst_u_stride,
                            dst_v, dst_v_stride,
                            0, 0,
                            src_coded_size.width(),
                            src_coded_size.height(),
                            dst_frame->coded_size().width(),
                            dst_frame->coded_size().height(),
                            libyuv::kRotate0,
                            src_pixelformat)) {
    LOG(ERROR) << "ConvertToI420 failed. Source format: " << src_pixelformat;
    return false;
  }
  return true;
}
| 651 | |
// Reclaims buffers the driver has finished with. Input (OUTPUT-queue)
// buffers are simply recycled; output (CAPTURE-queue) buffers complete the
// oldest running job: the decoded image is copied/converted into the
// client's frame and VideoFrameReady is posted to the child thread.
void V4L2JpegDecodeAccelerator::Dequeue() {
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());

  // Dequeue completed input (VIDEO_OUTPUT) buffers,
  // and recycle to the free list.
  struct v4l2_buffer dqbuf;
  while (InputBufferQueuedCount() > 0) {
    DCHECK(input_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      PLOG(ERROR) << "ioctl() failed: input buffer VIDIOC_DQBUF failed.";
      PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
      return;
    }
    BufferRecord& input_record = input_buffer_map_[dqbuf.index];
    DCHECK(input_record.at_device);
    input_record.at_device = false;
    free_input_buffers_.push_back(dqbuf.index);

    if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) {
      // The driver rejected this bitstream; drop its job so the output loop
      // below stays aligned with the remaining in-flight frames.
      DVLOG(1) << "Dequeue input buffer error.";
      PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG);
      running_jobs_.pop();
    }
  }

  // Dequeue completed output (VIDEO_CAPTURE) buffers, recycle to the free list.
  // Return the finished buffer to the client via the job ready callback.
  // If dequeued input buffer has an error, the error frame has removed from
  // |running_jobs_|. We only have to dequeue output buffer when we actually
  // have pending frames in |running_jobs_| and also enqueued output buffers.
  while (!running_jobs_.empty() && OutputBufferQueuedCount() > 0) {
    DCHECK(output_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    // From experiments, using MMAP and memory copy is still faster than
    // USERPTR. Also, client doesn't need to consider the buffer alignment and
    // JpegDecodeAccelerator API will be simpler.
    dqbuf.memory = V4L2_MEMORY_MMAP;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      PLOG(ERROR) << "ioctl() failed: output buffer VIDIOC_DQBUF failed.";
      PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
      return;
    }
    BufferRecord& output_record = output_buffer_map_[dqbuf.index];
    DCHECK(output_record.at_device);
    output_record.at_device = false;
    free_output_buffers_.push_back(dqbuf.index);

    // Jobs are always processed in FIFO order.
    linked_ptr<JobRecord> job_record = running_jobs_.front();
    running_jobs_.pop();

    if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) {
      DVLOG(1) << "Dequeue output buffer error.";
      PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG);
    } else {
      // Copy the decoded data from output buffer to the buffer provided by the
      // client. Do format conversion when output format is not
      // V4L2_PIX_FMT_YUV420.
      if (!CopyOutputImage(output_buffer_pixelformat_, output_record.address,
                           output_buffer_coded_size_, job_record->out_frame)) {
        PostNotifyError(job_record->bitstream_buffer_id, PLATFORM_FAILURE);
        return;
      }

      DVLOG(3) << "Decoding finished, returning bitstream buffer, id="
               << job_record->bitstream_buffer_id;

      child_task_runner_->PostTask(
          FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::VideoFrameReady,
                                weak_ptr_, job_record->bitstream_buffer_id));
    }
  }
}
| 737 | |
// Copies the JPEG bitstream at |input_ptr| into |output_ptr|, splicing in the
// default Huffman table segment (kDefaultDhtSeg) immediately before the SOS
// marker when the stream contains no DHT segment of its own.  Returns false
// if the input does not start with SOI or a marker segment is ill-formed.
// NOTE(review): output bounds are enforced only by the DCHECK below, which
// compiles out in release builds — callers must guarantee
// |output_size| >= |input_size| + sizeof(kDefaultDhtSeg).
static bool AddHuffmanTable(const void* input_ptr,
                            size_t input_size,
                            void* output_ptr,
                            size_t output_size) {
  DCHECK(input_ptr);
  DCHECK(output_ptr);
  DCHECK_LE((input_size + sizeof(kDefaultDhtSeg)), output_size);

  base::BigEndianReader reader(static_cast<const char*>(input_ptr), input_size);
  bool has_marker_dht = false;
  bool has_marker_sos = false;
  uint8_t marker1, marker2;
  // A JPEG must begin with an SOI marker (0xFF 0xD8).
  READ_U8_OR_RETURN_FALSE(reader, &marker1);
  READ_U8_OR_RETURN_FALSE(reader, &marker2);
  if (marker1 != media::JPEG_MARKER_PREFIX || marker2 != media::JPEG_SOI) {
    DLOG(ERROR) << __func__ << ": The input is not a Jpeg";
    return false;
  }

  // copy SOI marker (0xFF, 0xD8)
  memcpy(output_ptr, input_ptr, 2);
  size_t current_offset = 2;

  // Walk the marker segments, copying each one verbatim, until we see either
  // a DHT (tables already present — nothing to insert) or an SOS (scan data
  // starts; the default DHT must go in before it).
  while (!has_marker_sos && !has_marker_dht) {
    // |start_addr| marks the segment start so the whole segment (marker,
    // length field and payload) can be copied in one memcpy below.
    const char* start_addr = reader.ptr();
    READ_U8_OR_RETURN_FALSE(reader, &marker1);
    if (marker1 != media::JPEG_MARKER_PREFIX) {
      DLOG(ERROR) << __func__ << ": marker1 != 0xFF";
      return false;
    }
    do {
      READ_U8_OR_RETURN_FALSE(reader, &marker2);
    } while (marker2 == media::JPEG_MARKER_PREFIX);  // skip fill bytes

    // 16-bit big-endian segment length; it counts itself but not the two
    // marker bytes.
    uint16_t size;
    READ_U16_OR_RETURN_FALSE(reader, &size);
    // The size includes the size field itself.
    if (size < sizeof(size)) {
      DLOG(ERROR) << __func__ << ": Ill-formed JPEG. Segment size (" << size
                  << ") is smaller than size field (" << sizeof(size) << ")";
      return false;
    }
    size -= sizeof(size);

    switch (marker2) {
      case media::JPEG_DHT: {
        has_marker_dht = true;
        break;
      }
      case media::JPEG_SOS: {
        // No DHT seen before scan data: insert the default table segment
        // ahead of the SOS segment being copied below.
        if (!has_marker_dht) {
          memcpy(static_cast<uint8_t*>(output_ptr) + current_offset,
                 kDefaultDhtSeg, sizeof(kDefaultDhtSeg));
          current_offset += sizeof(kDefaultDhtSeg);
        }
        has_marker_sos = true;
        break;
      }
      default:
        break;
    }

    // Skip the payload; failure means the header promised more bytes than
    // the input actually contains.
    if (!reader.Skip(size)) {
      DLOG(ERROR) << __func__ << ": Ill-formed JPEG. Remaining size ("
                  << reader.remaining()
                  << ") is smaller than header specified (" << size << ")";
      return false;
    }

    // Copy the complete segment (marker bytes, any fill bytes, length field
    // and payload) unchanged into the output.
    size_t segment_size = static_cast<size_t>(reader.ptr() - start_addr);
    memcpy(static_cast<uint8_t*>(output_ptr) + current_offset, start_addr,
           segment_size);
    current_offset += segment_size;
  }
  // Everything after the terminating DHT/SOS segment (entropy-coded scan
  // data and trailing markers) is copied through verbatim.
  if (reader.remaining()) {
    memcpy(static_cast<uint8_t*>(output_ptr) + current_offset, reader.ptr(),
           reader.remaining());
  }
  return true;
}
| 818 | |
| 819 bool V4L2JpegDecodeAccelerator::EnqueueInputRecord() { | |
| 820 DCHECK(!input_jobs_.empty()); | |
| 821 DCHECK(!free_input_buffers_.empty()); | |
| 822 | |
| 823 // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame. | |
| 824 linked_ptr<JobRecord> job_record = input_jobs_.front(); | |
| 825 input_jobs_.pop(); | |
| 826 const int index = free_input_buffers_.back(); | |
| 827 BufferRecord& input_record = input_buffer_map_[index]; | |
| 828 DCHECK(!input_record.at_device); | |
| 829 | |
| 830 // It will add default huffman segment if it's missing. | |
| 831 if (!AddHuffmanTable(job_record->shm.memory(), job_record->shm.size(), | |
| 832 input_record.address, input_record.length)) { | |
| 833 PostNotifyError(job_record->bitstream_buffer_id, PARSE_JPEG_FAILED); | |
| 834 return false; | |
| 835 } | |
| 836 | |
| 837 struct v4l2_buffer qbuf; | |
| 838 memset(&qbuf, 0, sizeof(qbuf)); | |
| 839 qbuf.index = index; | |
| 840 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | |
| 841 qbuf.memory = V4L2_MEMORY_MMAP; | |
| 842 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); | |
| 843 input_record.at_device = true; | |
| 844 running_jobs_.push(job_record); | |
| 845 free_input_buffers_.pop_back(); | |
| 846 | |
| 847 DVLOG(3) << __func__ | |
| 848 << ": enqueued frame id=" << job_record->bitstream_buffer_id | |
| 849 << " to device."; | |
| 850 return true; | |
| 851 } | |
| 852 | |
| 853 bool V4L2JpegDecodeAccelerator::EnqueueOutputRecord() { | |
| 854 DCHECK(!free_output_buffers_.empty()); | |
| 855 | |
| 856 // Enqueue an output (VIDEO_CAPTURE) buffer. | |
| 857 const int index = free_output_buffers_.back(); | |
| 858 BufferRecord& output_record = output_buffer_map_[index]; | |
| 859 DCHECK(!output_record.at_device); | |
| 860 struct v4l2_buffer qbuf; | |
| 861 memset(&qbuf, 0, sizeof(qbuf)); | |
| 862 qbuf.index = index; | |
| 863 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | |
| 864 qbuf.memory = V4L2_MEMORY_MMAP; | |
| 865 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); | |
| 866 output_record.at_device = true; | |
| 867 free_output_buffers_.pop_back(); | |
| 868 return true; | |
| 869 } | |
| 870 | |
| 871 void V4L2JpegDecodeAccelerator::StartDevicePoll() { | |
| 872 DVLOG(3) << __func__ << ": starting device poll"; | |
| 873 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | |
| 874 DCHECK(!device_poll_thread_.IsRunning()); | |
| 875 | |
| 876 if (!device_poll_thread_.Start()) { | |
| 877 LOG(ERROR) << __func__ << ": Device thread failed to start"; | |
| 878 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); | |
| 879 return; | |
| 880 } | |
| 881 device_poll_task_runner_ = device_poll_thread_.task_runner(); | |
| 882 } | |
| 883 | |
| 884 bool V4L2JpegDecodeAccelerator::StopDevicePoll() { | |
| 885 DVLOG(3) << __func__ << ": stopping device poll"; | |
| 886 // Signal the DevicePollTask() to stop, and stop the device poll thread. | |
| 887 if (!device_->SetDevicePollInterrupt()) { | |
| 888 LOG(ERROR) << __func__ << ": SetDevicePollInterrupt failed."; | |
| 889 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); | |
| 890 return false; | |
| 891 } | |
| 892 | |
| 893 device_poll_thread_.Stop(); | |
| 894 | |
| 895 // Clear the interrupt now, to be sure. | |
| 896 if (!device_->ClearDevicePollInterrupt()) | |
| 897 return false; | |
| 898 | |
| 899 return true; | |
| 900 } | |
| 901 | |
| 902 } // namespace content | |
| OLD | NEW |