OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <errno.h> | 5 #include <errno.h> |
6 #include <fcntl.h> | 6 #include <fcntl.h> |
7 #include <linux/videodev2.h> | 7 #include <linux/videodev2.h> |
8 #include <poll.h> | 8 #include <poll.h> |
9 #include <string.h> | 9 #include <string.h> |
10 #include <sys/eventfd.h> | 10 #include <sys/eventfd.h> |
11 #include <sys/ioctl.h> | 11 #include <sys/ioctl.h> |
12 #include <sys/mman.h> | 12 #include <sys/mman.h> |
13 | 13 |
14 #include "base/bind.h" | 14 #include "base/bind.h" |
15 #include "base/bind_helpers.h" | 15 #include "base/bind_helpers.h" |
16 #include "base/callback.h" | 16 #include "base/callback.h" |
17 #include "base/callback_helpers.h" | 17 #include "base/callback_helpers.h" |
18 #include "base/command_line.h" | 18 #include "base/command_line.h" |
19 #include "base/macros.h" | 19 #include "base/macros.h" |
20 #include "base/numerics/safe_conversions.h" | 20 #include "base/numerics/safe_conversions.h" |
21 #include "base/strings/stringprintf.h" | 21 #include "base/strings/stringprintf.h" |
22 #include "content/common/gpu/media/shared_memory_region.h" | |
23 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h" | |
24 #include "media/base/bind_to_current_loop.h" | 22 #include "media/base/bind_to_current_loop.h" |
25 #include "media/base/media_switches.h" | 23 #include "media/base/media_switches.h" |
24 #include "media/gpu/shared_memory_region.h" | |
25 #include "media/gpu/v4l2_slice_video_decode_accelerator.h" | |
26 #include "ui/gl/gl_context.h" | 26 #include "ui/gl/gl_context.h" |
27 #include "ui/gl/scoped_binders.h" | 27 #include "ui/gl/scoped_binders.h" |
28 | 28 |
29 #define LOGF(level) LOG(level) << __FUNCTION__ << "(): " | 29 #define LOGF(level) LOG(level) << __FUNCTION__ << "(): " |
30 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): " | 30 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): " |
31 | 31 |
32 #define NOTIFY_ERROR(x) \ | 32 #define NOTIFY_ERROR(x) \ |
33 do { \ | 33 do { \ |
34 LOG(ERROR) << "Setting error state:" << x; \ | 34 LOG(ERROR) << "Setting error state:" << x; \ |
35 SetErrorState(x); \ | 35 SetErrorState(x); \ |
36 } while (0) | 36 } while (0) |
37 | 37 |
38 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_str) \ | 38 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_str) \ |
39 do { \ | 39 do { \ |
40 if (device_->Ioctl(type, arg) != 0) { \ | 40 if (device_->Ioctl(type, arg) != 0) { \ |
41 PLOG(ERROR) << __FUNCTION__ << "(): ioctl() failed: " << type_str; \ | 41 PLOG(ERROR) << __FUNCTION__ << "(): ioctl() failed: " << type_str; \ |
42 return value; \ | 42 return value; \ |
43 } \ | 43 } \ |
44 } while (0) | 44 } while (0) |
45 | 45 |
46 #define IOCTL_OR_ERROR_RETURN(type, arg) \ | 46 #define IOCTL_OR_ERROR_RETURN(type, arg) \ |
47 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0), #type) | 47 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0), #type) |
48 | 48 |
49 #define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \ | 49 #define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \ |
50 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type) | 50 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type) |
51 | 51 |
52 #define IOCTL_OR_LOG_ERROR(type, arg) \ | 52 #define IOCTL_OR_LOG_ERROR(type, arg) \ |
53 do { \ | 53 do { \ |
54 if (device_->Ioctl(type, arg) != 0) \ | 54 if (device_->Ioctl(type, arg) != 0) \ |
55 PLOG(ERROR) << __FUNCTION__ << "(): ioctl() failed: " << #type; \ | 55 PLOG(ERROR) << __FUNCTION__ << "(): ioctl() failed: " << #type; \ |
56 } while (0) | 56 } while (0) |
57 | 57 |
58 namespace content { | 58 namespace media { |
59 | 59 |
60 // static | 60 // static |
61 const uint32_t V4L2SliceVideoDecodeAccelerator::supported_input_fourccs_[] = { | 61 const uint32_t V4L2SliceVideoDecodeAccelerator::supported_input_fourccs_[] = { |
62 V4L2_PIX_FMT_H264_SLICE, V4L2_PIX_FMT_VP8_FRAME, | 62 V4L2_PIX_FMT_H264_SLICE, V4L2_PIX_FMT_VP8_FRAME, |
63 }; | 63 }; |
64 | 64 |
65 class V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface | 65 class V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface |
66 : public base::RefCounted<V4L2DecodeSurface> { | 66 : public base::RefCounted<V4L2DecodeSurface> { |
67 public: | 67 public: |
68 using ReleaseCB = base::Callback<void(int)>; | 68 using ReleaseCB = base::Callback<void(int)>; |
(...skipping 79 matching lines...) | |
148 base::StringAppendF(&out, " %d", ref->output_record()); | 148 base::StringAppendF(&out, " %d", ref->output_record()); |
149 } | 149 } |
150 return out; | 150 return out; |
151 } | 151 } |
152 | 152 |
153 V4L2SliceVideoDecodeAccelerator::InputRecord::InputRecord() | 153 V4L2SliceVideoDecodeAccelerator::InputRecord::InputRecord() |
154 : input_id(-1), | 154 : input_id(-1), |
155 address(nullptr), | 155 address(nullptr), |
156 length(0), | 156 length(0), |
157 bytes_used(0), | 157 bytes_used(0), |
158 at_device(false) { | 158 at_device(false) {} |
159 } | |
160 | 159 |
161 V4L2SliceVideoDecodeAccelerator::OutputRecord::OutputRecord() | 160 V4L2SliceVideoDecodeAccelerator::OutputRecord::OutputRecord() |
162 : at_device(false), | 161 : at_device(false), |
163 at_client(false), | 162 at_client(false), |
164 picture_id(-1), | 163 picture_id(-1), |
165 egl_image(EGL_NO_IMAGE_KHR), | 164 egl_image(EGL_NO_IMAGE_KHR), |
166 egl_sync(EGL_NO_SYNC_KHR), | 165 egl_sync(EGL_NO_SYNC_KHR), |
167 cleared(false) { | 166 cleared(false) {} |
168 } | |
169 | 167 |
170 struct V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef { | 168 struct V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef { |
171 BitstreamBufferRef( | 169 BitstreamBufferRef( |
172 base::WeakPtr<VideoDecodeAccelerator::Client>& client, | 170 base::WeakPtr<VideoDecodeAccelerator::Client>& client, |
173 const scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner, | 171 const scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner, |
174 SharedMemoryRegion* shm, | 172 SharedMemoryRegion* shm, |
175 int32_t input_id); | 173 int32_t input_id); |
176 ~BitstreamBufferRef(); | 174 ~BitstreamBufferRef(); |
177 const base::WeakPtr<VideoDecodeAccelerator::Client> client; | 175 const base::WeakPtr<VideoDecodeAccelerator::Client> client; |
178 const scoped_refptr<base::SingleThreadTaskRunner> client_task_runner; | 176 const scoped_refptr<base::SingleThreadTaskRunner> client_task_runner; |
(...skipping 26 matching lines...) | |
205 struct V4L2SliceVideoDecodeAccelerator::EGLSyncKHRRef { | 203 struct V4L2SliceVideoDecodeAccelerator::EGLSyncKHRRef { |
206 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync); | 204 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync); |
207 ~EGLSyncKHRRef(); | 205 ~EGLSyncKHRRef(); |
208 EGLDisplay const egl_display; | 206 EGLDisplay const egl_display; |
209 EGLSyncKHR egl_sync; | 207 EGLSyncKHR egl_sync; |
210 }; | 208 }; |
211 | 209 |
212 V4L2SliceVideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef( | 210 V4L2SliceVideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef( |
213 EGLDisplay egl_display, | 211 EGLDisplay egl_display, |
214 EGLSyncKHR egl_sync) | 212 EGLSyncKHR egl_sync) |
215 : egl_display(egl_display), egl_sync(egl_sync) { | 213 : egl_display(egl_display), egl_sync(egl_sync) {} |
216 } | |
217 | 214 |
218 V4L2SliceVideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() { | 215 V4L2SliceVideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() { |
219 // We don't check for eglDestroySyncKHR failures, because if we get here | 216 // We don't check for eglDestroySyncKHR failures, because if we get here |
220 // with a valid sync object, something went wrong and we are getting | 217 // with a valid sync object, something went wrong and we are getting |
221 // destroyed anyway. | 218 // destroyed anyway. |
222 if (egl_sync != EGL_NO_SYNC_KHR) | 219 if (egl_sync != EGL_NO_SYNC_KHR) |
223 eglDestroySyncKHR(egl_display, egl_sync); | 220 eglDestroySyncKHR(egl_display, egl_sync); |
224 } | 221 } |
225 | 222 |
226 struct V4L2SliceVideoDecodeAccelerator::PictureRecord { | 223 struct V4L2SliceVideoDecodeAccelerator::PictureRecord { |
227 PictureRecord(bool cleared, const media::Picture& picture); | 224 PictureRecord(bool cleared, const media::Picture& picture); |
228 ~PictureRecord(); | 225 ~PictureRecord(); |
229 bool cleared; // Whether the texture is cleared and safe to render from. | 226 bool cleared; // Whether the texture is cleared and safe to render from. |
230 media::Picture picture; // The decoded picture. | 227 media::Picture picture; // The decoded picture. |
231 }; | 228 }; |
232 | 229 |
233 V4L2SliceVideoDecodeAccelerator::PictureRecord::PictureRecord( | 230 V4L2SliceVideoDecodeAccelerator::PictureRecord::PictureRecord( |
234 bool cleared, | 231 bool cleared, |
235 const media::Picture& picture) | 232 const media::Picture& picture) |
236 : cleared(cleared), picture(picture) { | 233 : cleared(cleared), picture(picture) {} |
237 } | |
238 | 234 |
239 V4L2SliceVideoDecodeAccelerator::PictureRecord::~PictureRecord() { | 235 V4L2SliceVideoDecodeAccelerator::PictureRecord::~PictureRecord() {} |
240 } | |
241 | 236 |
242 class V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator | 237 class V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator |
243 : public H264Decoder::H264Accelerator { | 238 : public H264Decoder::H264Accelerator { |
244 public: | 239 public: |
245 V4L2H264Accelerator(V4L2SliceVideoDecodeAccelerator* v4l2_dec); | 240 V4L2H264Accelerator(V4L2SliceVideoDecodeAccelerator* v4l2_dec); |
246 ~V4L2H264Accelerator() override; | 241 ~V4L2H264Accelerator() override; |
247 | 242 |
248 // H264Decoder::H264Accelerator implementation. | 243 // H264Decoder::H264Accelerator implementation. |
249 scoped_refptr<H264Picture> CreateH264Picture() override; | 244 scoped_refptr<H264Picture> CreateH264Picture() override; |
250 | 245 |
(...skipping 65 matching lines...) | |
316 | 311 |
317 V4L2SliceVideoDecodeAccelerator* v4l2_dec_; | 312 V4L2SliceVideoDecodeAccelerator* v4l2_dec_; |
318 | 313 |
319 DISALLOW_COPY_AND_ASSIGN(V4L2VP8Accelerator); | 314 DISALLOW_COPY_AND_ASSIGN(V4L2VP8Accelerator); |
320 }; | 315 }; |
321 | 316 |
322 // Codec-specific subclasses of software decoder picture classes. | 317 // Codec-specific subclasses of software decoder picture classes. |
323 // This allows us to keep decoders oblivious of our implementation details. | 318 // This allows us to keep decoders oblivious of our implementation details. |
324 class V4L2H264Picture : public H264Picture { | 319 class V4L2H264Picture : public H264Picture { |
325 public: | 320 public: |
326 V4L2H264Picture(const scoped_refptr< | 321 V4L2H264Picture( |
327 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface); | 322 const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& |
323 dec_surface); | |
328 | 324 |
329 V4L2H264Picture* AsV4L2H264Picture() override { return this; } | 325 V4L2H264Picture* AsV4L2H264Picture() override { return this; } |
330 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface> | 326 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface> |
331 dec_surface() { | 327 dec_surface() { |
332 return dec_surface_; | 328 return dec_surface_; |
333 } | 329 } |
334 | 330 |
335 private: | 331 private: |
336 ~V4L2H264Picture() override; | 332 ~V4L2H264Picture() override; |
337 | 333 |
338 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface> | 334 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface> |
339 dec_surface_; | 335 dec_surface_; |
340 | 336 |
341 DISALLOW_COPY_AND_ASSIGN(V4L2H264Picture); | 337 DISALLOW_COPY_AND_ASSIGN(V4L2H264Picture); |
342 }; | 338 }; |
343 | 339 |
344 V4L2H264Picture::V4L2H264Picture(const scoped_refptr< | 340 V4L2H264Picture::V4L2H264Picture( |
345 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface) | 341 const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& |
346 : dec_surface_(dec_surface) { | 342 dec_surface) |
347 } | 343 : dec_surface_(dec_surface) {} |
348 | 344 |
349 V4L2H264Picture::~V4L2H264Picture() { | 345 V4L2H264Picture::~V4L2H264Picture() {} |
350 } | |
351 | 346 |
352 class V4L2VP8Picture : public VP8Picture { | 347 class V4L2VP8Picture : public VP8Picture { |
353 public: | 348 public: |
354 V4L2VP8Picture(const scoped_refptr< | 349 V4L2VP8Picture( |
355 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface); | 350 const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& |
351 dec_surface); | |
356 | 352 |
357 V4L2VP8Picture* AsV4L2VP8Picture() override { return this; } | 353 V4L2VP8Picture* AsV4L2VP8Picture() override { return this; } |
358 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface> | 354 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface> |
359 dec_surface() { | 355 dec_surface() { |
360 return dec_surface_; | 356 return dec_surface_; |
361 } | 357 } |
362 | 358 |
363 private: | 359 private: |
364 ~V4L2VP8Picture() override; | 360 ~V4L2VP8Picture() override; |
365 | 361 |
366 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface> | 362 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface> |
367 dec_surface_; | 363 dec_surface_; |
368 | 364 |
369 DISALLOW_COPY_AND_ASSIGN(V4L2VP8Picture); | 365 DISALLOW_COPY_AND_ASSIGN(V4L2VP8Picture); |
370 }; | 366 }; |
371 | 367 |
372 V4L2VP8Picture::V4L2VP8Picture(const scoped_refptr< | 368 V4L2VP8Picture::V4L2VP8Picture( |
373 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface) | 369 const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& |
374 : dec_surface_(dec_surface) { | 370 dec_surface) |
375 } | 371 : dec_surface_(dec_surface) {} |
376 | 372 |
377 V4L2VP8Picture::~V4L2VP8Picture() { | 373 V4L2VP8Picture::~V4L2VP8Picture() {} |
378 } | |
379 | 374 |
380 V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator( | 375 V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator( |
381 const scoped_refptr<V4L2Device>& device, | 376 const scoped_refptr<V4L2Device>& device, |
382 EGLDisplay egl_display, | 377 EGLDisplay egl_display, |
383 const GetGLContextCallback& get_gl_context_cb, | 378 const GetGLContextCallback& get_gl_context_cb, |
384 const MakeGLContextCurrentCallback& make_context_current_cb) | 379 const MakeGLContextCurrentCallback& make_context_current_cb) |
385 : input_planes_count_(0), | 380 : input_planes_count_(0), |
386 output_planes_count_(0), | 381 output_planes_count_(0), |
387 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), | 382 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), |
388 device_(device), | 383 device_(device), |
(...skipping 112 matching lines...) | |
501 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync"; | 496 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync"; |
502 return false; | 497 return false; |
503 } | 498 } |
504 | 499 |
505 // Capabilities check. | 500 // Capabilities check. |
506 struct v4l2_capability caps; | 501 struct v4l2_capability caps; |
507 const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING; | 502 const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING; |
508 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps); | 503 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps); |
509 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { | 504 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
510 LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP" | 505 LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP" |
511 ", caps check failed: 0x" << std::hex << caps.capabilities; | 506 ", caps check failed: 0x" |
507 << std::hex << caps.capabilities; | |
512 return false; | 508 return false; |
513 } | 509 } |
514 | 510 |
515 if (!SetupFormats()) | 511 if (!SetupFormats()) |
516 return false; | 512 return false; |
517 | 513 |
518 if (!decoder_thread_.Start()) { | 514 if (!decoder_thread_.Start()) { |
519 DLOG(ERROR) << "Initialize(): device thread failed to start"; | 515 DLOG(ERROR) << "Initialize(): device thread failed to start"; |
520 return false; | 516 return false; |
521 } | 517 } |
(...skipping 167 matching lines...) | |
689 struct v4l2_plane planes[VIDEO_MAX_PLANES]; | 685 struct v4l2_plane planes[VIDEO_MAX_PLANES]; |
690 struct v4l2_buffer buffer; | 686 struct v4l2_buffer buffer; |
691 memset(&buffer, 0, sizeof(buffer)); | 687 memset(&buffer, 0, sizeof(buffer)); |
692 memset(planes, 0, sizeof(planes)); | 688 memset(planes, 0, sizeof(planes)); |
693 buffer.index = i; | 689 buffer.index = i; |
694 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | 690 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
695 buffer.memory = V4L2_MEMORY_MMAP; | 691 buffer.memory = V4L2_MEMORY_MMAP; |
696 buffer.m.planes = planes; | 692 buffer.m.planes = planes; |
697 buffer.length = input_planes_count_; | 693 buffer.length = input_planes_count_; |
698 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); | 694 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); |
699 void* address = device_->Mmap(nullptr, | 695 void* address = device_->Mmap(nullptr, buffer.m.planes[0].length, |
700 buffer.m.planes[0].length, | 696 PROT_READ | PROT_WRITE, MAP_SHARED, |
701 PROT_READ | PROT_WRITE, | |
702 MAP_SHARED, | |
703 buffer.m.planes[0].m.mem_offset); | 697 buffer.m.planes[0].m.mem_offset); |
704 if (address == MAP_FAILED) { | 698 if (address == MAP_FAILED) { |
705 PLOG(ERROR) << "CreateInputBuffers(): mmap() failed"; | 699 PLOG(ERROR) << "CreateInputBuffers(): mmap() failed"; |
706 return false; | 700 return false; |
707 } | 701 } |
708 input_buffer_map_[i].address = address; | 702 input_buffer_map_[i].address = address; |
709 input_buffer_map_[i].length = buffer.m.planes[0].length; | 703 input_buffer_map_[i].length = buffer.m.planes[0].length; |
710 } | 704 } |
711 | 705 |
712 return true; | 706 return true; |
(...skipping 135 matching lines...) | |
848 } | 842 } |
849 | 843 |
850 DVLOGF(4) << "Scheduling device poll task"; | 844 DVLOGF(4) << "Scheduling device poll task"; |
851 | 845 |
852 device_poll_thread_.message_loop()->PostTask( | 846 device_poll_thread_.message_loop()->PostTask( |
853 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DevicePollTask, | 847 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DevicePollTask, |
854 base::Unretained(this), true)); | 848 base::Unretained(this), true)); |
855 | 849 |
856 DVLOGF(2) << "buffer counts: " | 850 DVLOGF(2) << "buffer counts: " |
857 << "INPUT[" << decoder_input_queue_.size() << "]" | 851 << "INPUT[" << decoder_input_queue_.size() << "]" |
858 << " => DEVICE[" | 852 << " => DEVICE[" << free_input_buffers_.size() << "+" |
859 << free_input_buffers_.size() << "+" | 853 << input_buffer_queued_count_ << "/" << input_buffer_map_.size() |
860 << input_buffer_queued_count_ << "/" | 854 << "]->[" << free_output_buffers_.size() << "+" |
861 << input_buffer_map_.size() << "]->[" | 855 << output_buffer_queued_count_ << "/" << output_buffer_map_.size() |
862 << free_output_buffers_.size() << "+" | 856 << "]" |
863 << output_buffer_queued_count_ << "/" | |
864 << output_buffer_map_.size() << "]" | |
865 << " => DISPLAYQ[" << decoder_display_queue_.size() << "]" | 857 << " => DISPLAYQ[" << decoder_display_queue_.size() << "]" |
866 << " => CLIENT[" << surfaces_at_display_.size() << "]"; | 858 << " => CLIENT[" << surfaces_at_display_.size() << "]"; |
867 } | 859 } |
868 | 860 |
869 void V4L2SliceVideoDecodeAccelerator::Enqueue( | 861 void V4L2SliceVideoDecodeAccelerator::Enqueue( |
870 const scoped_refptr<V4L2DecodeSurface>& dec_surface) { | 862 const scoped_refptr<V4L2DecodeSurface>& dec_surface) { |
871 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 863 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
872 | 864 |
873 const int old_inputs_queued = input_buffer_queued_count_; | 865 const int old_inputs_queued = input_buffer_queued_count_; |
874 const int old_outputs_queued = output_buffer_queued_count_; | 866 const int old_outputs_queued = output_buffer_queued_count_; |
875 | 867 |
876 if (!EnqueueInputRecord(dec_surface->input_record(), | 868 if (!EnqueueInputRecord(dec_surface->input_record(), |
877 dec_surface->config_store())) { | 869 dec_surface->config_store())) { |
878 DVLOGF(1) << "Failed queueing an input buffer"; | 870 DVLOGF(1) << "Failed queueing an input buffer"; |
879 NOTIFY_ERROR(PLATFORM_FAILURE); | 871 NOTIFY_ERROR(PLATFORM_FAILURE); |
880 return; | 872 return; |
881 } | 873 } |
882 | 874 |
883 if (!EnqueueOutputRecord(dec_surface->output_record())) { | 875 if (!EnqueueOutputRecord(dec_surface->output_record())) { |
884 DVLOGF(1) << "Failed queueing an output buffer"; | 876 DVLOGF(1) << "Failed queueing an output buffer"; |
885 NOTIFY_ERROR(PLATFORM_FAILURE); | 877 NOTIFY_ERROR(PLATFORM_FAILURE); |
886 return; | 878 return; |
887 } | 879 } |
888 | 880 |
889 bool inserted = | 881 bool inserted = |
890 surfaces_at_device_.insert(std::make_pair(dec_surface->output_record(), | 882 surfaces_at_device_ |
891 dec_surface)).second; | 883 .insert(std::make_pair(dec_surface->output_record(), dec_surface)) |
884 .second; | |
892 DCHECK(inserted); | 885 DCHECK(inserted); |
893 | 886 |
894 if (old_inputs_queued == 0 && old_outputs_queued == 0) | 887 if (old_inputs_queued == 0 && old_outputs_queued == 0) |
895 SchedulePollIfNeeded(); | 888 SchedulePollIfNeeded(); |
896 } | 889 } |
897 | 890 |
898 void V4L2SliceVideoDecodeAccelerator::Dequeue() { | 891 void V4L2SliceVideoDecodeAccelerator::Dequeue() { |
899 DVLOGF(3); | 892 DVLOGF(3); |
900 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 893 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
901 | 894 |
(...skipping 39 matching lines...) | |
941 break; | 934 break; |
942 } | 935 } |
943 PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF"; | 936 PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF"; |
944 NOTIFY_ERROR(PLATFORM_FAILURE); | 937 NOTIFY_ERROR(PLATFORM_FAILURE); |
945 return; | 938 return; |
946 } | 939 } |
947 OutputRecord& output_record = output_buffer_map_[dqbuf.index]; | 940 OutputRecord& output_record = output_buffer_map_[dqbuf.index]; |
948 DCHECK(output_record.at_device); | 941 DCHECK(output_record.at_device); |
949 output_record.at_device = false; | 942 output_record.at_device = false; |
950 output_buffer_queued_count_--; | 943 output_buffer_queued_count_--; |
951 DVLOGF(3) << "Dequeued output=" << dqbuf.index | 944 DVLOGF(3) << "Dequeued output=" << dqbuf.index << " count " |
952 << " count " << output_buffer_queued_count_; | 945 << output_buffer_queued_count_; |
953 | 946 |
954 V4L2DecodeSurfaceByOutputId::iterator it = | 947 V4L2DecodeSurfaceByOutputId::iterator it = |
955 surfaces_at_device_.find(dqbuf.index); | 948 surfaces_at_device_.find(dqbuf.index); |
956 if (it == surfaces_at_device_.end()) { | 949 if (it == surfaces_at_device_.end()) { |
957 DLOG(ERROR) << "Got invalid surface from device."; | 950 DLOG(ERROR) << "Got invalid surface from device."; |
958 NOTIFY_ERROR(PLATFORM_FAILURE); | 951 NOTIFY_ERROR(PLATFORM_FAILURE); |
959 } | 952 } |
960 | 953 |
961 it->second->SetDecoded(); | 954 it->second->SetDecoded(); |
962 surfaces_at_device_.erase(it); | 955 surfaces_at_device_.erase(it); |
(...skipping 21 matching lines...) | |
984 DVLOGF(4) << "Reusing input buffer, index=" << index; | 977 DVLOGF(4) << "Reusing input buffer, index=" << index; |
985 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 978 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
986 | 979 |
987 DCHECK_LT(index, static_cast<int>(input_buffer_map_.size())); | 980 DCHECK_LT(index, static_cast<int>(input_buffer_map_.size())); |
988 InputRecord& input_record = input_buffer_map_[index]; | 981 InputRecord& input_record = input_buffer_map_[index]; |
989 | 982 |
990 DCHECK(!input_record.at_device); | 983 DCHECK(!input_record.at_device); |
991 input_record.input_id = -1; | 984 input_record.input_id = -1; |
992 input_record.bytes_used = 0; | 985 input_record.bytes_used = 0; |
993 | 986 |
994 DCHECK_EQ(std::count(free_input_buffers_.begin(), free_input_buffers_.end(), | 987 DCHECK_EQ( |
995 index), 0); | 988 std::count(free_input_buffers_.begin(), free_input_buffers_.end(), index), |
989 0); | |
996 free_input_buffers_.push_back(index); | 990 free_input_buffers_.push_back(index); |
997 } | 991 } |
998 | 992 |
999 void V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer(int index) { | 993 void V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer(int index) { |
1000 DVLOGF(4) << "Reusing output buffer, index=" << index; | 994 DVLOGF(4) << "Reusing output buffer, index=" << index; |
1001 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 995 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
1002 | 996 |
1003 DCHECK_LT(index, static_cast<int>(output_buffer_map_.size())); | 997 DCHECK_LT(index, static_cast<int>(output_buffer_map_.size())); |
1004 OutputRecord& output_record = output_buffer_map_[index]; | 998 OutputRecord& output_record = output_buffer_map_[index]; |
1005 DCHECK(!output_record.at_device); | 999 DCHECK(!output_record.at_device); |
1006 DCHECK(!output_record.at_client); | 1000 DCHECK(!output_record.at_client); |
1007 | 1001 |
1008 DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(), | 1002 DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(), |
1009 index), 0); | 1003 index), |
1004 0); | |
1010 free_output_buffers_.push_back(index); | 1005 free_output_buffers_.push_back(index); |
1011 | 1006 |
1012 ScheduleDecodeBufferTaskIfNeeded(); | 1007 ScheduleDecodeBufferTaskIfNeeded(); |
1013 } | 1008 } |
1014 | 1009 |
1015 bool V4L2SliceVideoDecodeAccelerator::EnqueueInputRecord( | 1010 bool V4L2SliceVideoDecodeAccelerator::EnqueueInputRecord( |
1016 int index, | 1011 int index, |
1017 uint32_t config_store) { | 1012 uint32_t config_store) { |
1018 DVLOGF(3); | 1013 DVLOGF(3); |
1019 DCHECK_LT(index, static_cast<int>(input_buffer_map_.size())); | 1014 DCHECK_LT(index, static_cast<int>(input_buffer_map_.size())); |
(...skipping 423 matching lines...) | |
1443 | 1438 |
1444 void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffers( | 1439 void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffers( |
1445 const std::vector<media::PictureBuffer>& buffers) { | 1440 const std::vector<media::PictureBuffer>& buffers) { |
1446 DVLOGF(3); | 1441 DVLOGF(3); |
1447 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 1442 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
1448 | 1443 |
1449 const uint32_t req_buffer_count = decoder_->GetRequiredNumOfPictures(); | 1444 const uint32_t req_buffer_count = decoder_->GetRequiredNumOfPictures(); |
1450 | 1445 |
1451 if (buffers.size() < req_buffer_count) { | 1446 if (buffers.size() < req_buffer_count) { |
1452 DLOG(ERROR) << "Failed to provide requested picture buffers. " | 1447 DLOG(ERROR) << "Failed to provide requested picture buffers. " |
1453 << "(Got " << buffers.size() | 1448 << "(Got " << buffers.size() << ", requested " |
1454 << ", requested " << req_buffer_count << ")"; | 1449 << req_buffer_count << ")"; |
1455 NOTIFY_ERROR(INVALID_ARGUMENT); | 1450 NOTIFY_ERROR(INVALID_ARGUMENT); |
1456 return; | 1451 return; |
1457 } | 1452 } |
1458 | 1453 |
1459 gfx::GLContext* gl_context = get_gl_context_cb_.Run(); | 1454 gfx::GLContext* gl_context = get_gl_context_cb_.Run(); |
1460 if (!gl_context || !make_context_current_cb_.Run()) { | 1455 if (!gl_context || !make_context_current_cb_.Run()) { |
1461 DLOG(ERROR) << "No GL context"; | 1456 DLOG(ERROR) << "No GL context"; |
1462 NOTIFY_ERROR(PLATFORM_FAILURE); | 1457 NOTIFY_ERROR(PLATFORM_FAILURE); |
1463 return; | 1458 return; |
1464 } | 1459 } |
(...skipping 296 matching lines...) | |
1761 | 1756 |
1762 state_ = kError; | 1757 state_ = kError; |
1763 } | 1758 } |
1764 | 1759 |
1765 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::V4L2H264Accelerator( | 1760 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::V4L2H264Accelerator( |
1766 V4L2SliceVideoDecodeAccelerator* v4l2_dec) | 1761 V4L2SliceVideoDecodeAccelerator* v4l2_dec) |
1767 : num_slices_(0), v4l2_dec_(v4l2_dec) { | 1762 : num_slices_(0), v4l2_dec_(v4l2_dec) { |
1768 DCHECK(v4l2_dec_); | 1763 DCHECK(v4l2_dec_); |
1769 } | 1764 } |
1770 | 1765 |
1771 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::~V4L2H264Accelerator() { | 1766 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::~V4L2H264Accelerator() {} |
1772 } | |
1773 | 1767 |
1774 scoped_refptr<H264Picture> | 1768 scoped_refptr<H264Picture> |
1775 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::CreateH264Picture() { | 1769 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::CreateH264Picture() { |
1776 scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface(); | 1770 scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface(); |
1777 if (!dec_surface) | 1771 if (!dec_surface) |
1778 return nullptr; | 1772 return nullptr; |
1779 | 1773 |
1780 return new V4L2H264Picture(dec_surface); | 1774 return new V4L2H264Picture(dec_surface); |
1781 } | 1775 } |
1782 | 1776 |
(...skipping 47 matching lines...) | |
1830 const H264Picture::Vector& ref_pic_listp0, | 1824 const H264Picture::Vector& ref_pic_listp0, |
1831 const H264Picture::Vector& ref_pic_listb0, | 1825 const H264Picture::Vector& ref_pic_listb0, |
1832 const H264Picture::Vector& ref_pic_listb1, | 1826 const H264Picture::Vector& ref_pic_listb1, |
1833 const scoped_refptr<H264Picture>& pic) { | 1827 const scoped_refptr<H264Picture>& pic) { |
1834 struct v4l2_ext_control ctrl; | 1828 struct v4l2_ext_control ctrl; |
1835 std::vector<struct v4l2_ext_control> ctrls; | 1829 std::vector<struct v4l2_ext_control> ctrls; |
1836 | 1830 |
1837 struct v4l2_ctrl_h264_sps v4l2_sps; | 1831 struct v4l2_ctrl_h264_sps v4l2_sps; |
1838 memset(&v4l2_sps, 0, sizeof(v4l2_sps)); | 1832 memset(&v4l2_sps, 0, sizeof(v4l2_sps)); |
1839 v4l2_sps.constraint_set_flags = | 1833 v4l2_sps.constraint_set_flags = |
1840 sps->constraint_set0_flag ? V4L2_H264_SPS_CONSTRAINT_SET0_FLAG : 0 | | 1834 sps->constraint_set0_flag |
1841 sps->constraint_set1_flag ? V4L2_H264_SPS_CONSTRAINT_SET1_FLAG : 0 | | 1835 ? V4L2_H264_SPS_CONSTRAINT_SET0_FLAG |
1842 sps->constraint_set2_flag ? V4L2_H264_SPS_CONSTRAINT_SET2_FLAG : 0 | | 1836 : 0 | sps->constraint_set1_flag |
1843 sps->constraint_set3_flag ? V4L2_H264_SPS_CONSTRAINT_SET3_FLAG : 0 | | 1837 ? V4L2_H264_SPS_CONSTRAINT_SET1_FLAG |
1844 sps->constraint_set4_flag ? V4L2_H264_SPS_CONSTRAINT_SET4_FLAG : 0 | | 1838 : 0 | sps->constraint_set2_flag |
1845 sps->constraint_set5_flag ? V4L2_H264_SPS_CONSTRAINT_SET5_FLAG : 0; | 1839 ? V4L2_H264_SPS_CONSTRAINT_SET2_FLAG |
1840 : 0 | sps->constraint_set3_flag | |
1841 ? V4L2_H264_SPS_CONSTRAINT_SET3_FLAG | |
1842 : 0 | sps->constraint_set4_flag | |
1843 ? V4L2_H264_SPS_CONSTRAINT_SET4_FLAG | |
1844 : 0 | sps->constraint_set5_flag | |
1845 ? V4L2_H264_SPS_CONSTRAINT_SET5_FLAG | |
1846 : 0; | |
1846 #define SPS_TO_V4L2SPS(a) v4l2_sps.a = sps->a | 1847 #define SPS_TO_V4L2SPS(a) v4l2_sps.a = sps->a |
1847 SPS_TO_V4L2SPS(profile_idc); | 1848 SPS_TO_V4L2SPS(profile_idc); |
1848 SPS_TO_V4L2SPS(level_idc); | 1849 SPS_TO_V4L2SPS(level_idc); |
1849 SPS_TO_V4L2SPS(seq_parameter_set_id); | 1850 SPS_TO_V4L2SPS(seq_parameter_set_id); |
1850 SPS_TO_V4L2SPS(chroma_format_idc); | 1851 SPS_TO_V4L2SPS(chroma_format_idc); |
1851 SPS_TO_V4L2SPS(bit_depth_luma_minus8); | 1852 SPS_TO_V4L2SPS(bit_depth_luma_minus8); |
1852 SPS_TO_V4L2SPS(bit_depth_chroma_minus8); | 1853 SPS_TO_V4L2SPS(bit_depth_chroma_minus8); |
1853 SPS_TO_V4L2SPS(log2_max_frame_num_minus4); | 1854 SPS_TO_V4L2SPS(log2_max_frame_num_minus4); |
1854 SPS_TO_V4L2SPS(pic_order_cnt_type); | 1855 SPS_TO_V4L2SPS(pic_order_cnt_type); |
1855 SPS_TO_V4L2SPS(log2_max_pic_order_cnt_lsb_minus4); | 1856 SPS_TO_V4L2SPS(log2_max_pic_order_cnt_lsb_minus4); |
1856 SPS_TO_V4L2SPS(offset_for_non_ref_pic); | 1857 SPS_TO_V4L2SPS(offset_for_non_ref_pic); |
1857 SPS_TO_V4L2SPS(offset_for_top_to_bottom_field); | 1858 SPS_TO_V4L2SPS(offset_for_top_to_bottom_field); |
1858 SPS_TO_V4L2SPS(num_ref_frames_in_pic_order_cnt_cycle); | 1859 SPS_TO_V4L2SPS(num_ref_frames_in_pic_order_cnt_cycle); |
1859 | 1860 |
1860 static_assert(arraysize(v4l2_sps.offset_for_ref_frame) == | 1861 static_assert(arraysize(v4l2_sps.offset_for_ref_frame) == |
1861 arraysize(sps->offset_for_ref_frame), | 1862 arraysize(sps->offset_for_ref_frame), |
1862 "offset_for_ref_frame arrays must be same size"); | 1863 "offset_for_ref_frame arrays must be same size"); |
1863 for (size_t i = 0; i < arraysize(v4l2_sps.offset_for_ref_frame); ++i) | 1864 for (size_t i = 0; i < arraysize(v4l2_sps.offset_for_ref_frame); ++i) |
1864 v4l2_sps.offset_for_ref_frame[i] = sps->offset_for_ref_frame[i]; | 1865 v4l2_sps.offset_for_ref_frame[i] = sps->offset_for_ref_frame[i]; |
1865 SPS_TO_V4L2SPS(max_num_ref_frames); | 1866 SPS_TO_V4L2SPS(max_num_ref_frames); |
1866 SPS_TO_V4L2SPS(pic_width_in_mbs_minus1); | 1867 SPS_TO_V4L2SPS(pic_width_in_mbs_minus1); |
1867 SPS_TO_V4L2SPS(pic_height_in_map_units_minus1); | 1868 SPS_TO_V4L2SPS(pic_height_in_map_units_minus1); |
1868 #undef SPS_TO_V4L2SPS | 1869 #undef SPS_TO_V4L2SPS |
1869 | 1870 |
1870 #define SET_V4L2_SPS_FLAG_IF(cond, flag) \ | 1871 #define SET_V4L2_SPS_FLAG_IF(cond, flag) \ |
1871 v4l2_sps.flags |= ((sps->cond) ? (flag) : 0) | 1872 v4l2_sps.flags |= ((sps->cond) ? (flag) : 0) |
(...skipping 53 matching lines...) | |
1925 #undef SET_V4L2_PPS_FLAG_IF | 1926 #undef SET_V4L2_PPS_FLAG_IF |
1926 memset(&ctrl, 0, sizeof(ctrl)); | 1927 memset(&ctrl, 0, sizeof(ctrl)); |
1927 ctrl.id = V4L2_CID_MPEG_VIDEO_H264_PPS; | 1928 ctrl.id = V4L2_CID_MPEG_VIDEO_H264_PPS; |
1928 ctrl.size = sizeof(v4l2_pps); | 1929 ctrl.size = sizeof(v4l2_pps); |
1929 ctrl.p_h264_pps = &v4l2_pps; | 1930 ctrl.p_h264_pps = &v4l2_pps; |
1930 ctrls.push_back(ctrl); | 1931 ctrls.push_back(ctrl); |
1931 | 1932 |
1932 struct v4l2_ctrl_h264_scaling_matrix v4l2_scaling_matrix; | 1933 struct v4l2_ctrl_h264_scaling_matrix v4l2_scaling_matrix; |
1933 memset(&v4l2_scaling_matrix, 0, sizeof(v4l2_scaling_matrix)); | 1934 memset(&v4l2_scaling_matrix, 0, sizeof(v4l2_scaling_matrix)); |
1934 static_assert(arraysize(v4l2_scaling_matrix.scaling_list_4x4) <= | 1935 static_assert(arraysize(v4l2_scaling_matrix.scaling_list_4x4) <= |
1935 arraysize(pps->scaling_list4x4) && | 1936 arraysize(pps->scaling_list4x4) && |
Pawel Osciak 2016/04/19 09:22:55: Is this correct formatting?
Mark Dittmer 2016/05/02 03:51:23: Done.
1936 arraysize(v4l2_scaling_matrix.scaling_list_4x4[0]) <= | 1937 arraysize(v4l2_scaling_matrix.scaling_list_4x4[0]) <= |
1937 arraysize(pps->scaling_list4x4[0]) && | 1938 arraysize(pps->scaling_list4x4[0]) && |
1938 arraysize(v4l2_scaling_matrix.scaling_list_8x8) <= | 1939 arraysize(v4l2_scaling_matrix.scaling_list_8x8) <= |
1939 arraysize(pps->scaling_list8x8) && | 1940 arraysize(pps->scaling_list8x8) && |
1940 arraysize(v4l2_scaling_matrix.scaling_list_8x8[0]) <= | 1941 arraysize(v4l2_scaling_matrix.scaling_list_8x8[0]) <= |
1941 arraysize(pps->scaling_list8x8[0]), | 1942 arraysize(pps->scaling_list8x8[0]), |
1942 "scaling_lists must be of correct size"); | 1943 "scaling_lists must be of correct size"); |
1943 for (size_t i = 0; i < arraysize(v4l2_scaling_matrix.scaling_list_4x4); ++i) { | 1944 for (size_t i = 0; i < arraysize(v4l2_scaling_matrix.scaling_list_4x4); ++i) { |
1944 for (size_t j = 0; j < arraysize(v4l2_scaling_matrix.scaling_list_4x4[i]); | 1945 for (size_t j = 0; j < arraysize(v4l2_scaling_matrix.scaling_list_4x4[i]); |
1945 ++j) { | 1946 ++j) { |
1946 v4l2_scaling_matrix.scaling_list_4x4[i][j] = pps->scaling_list4x4[i][j]; | 1947 v4l2_scaling_matrix.scaling_list_4x4[i][j] = pps->scaling_list4x4[i][j]; |
1947 } | 1948 } |
1948 } | 1949 } |
1949 for (size_t i = 0; i < arraysize(v4l2_scaling_matrix.scaling_list_8x8); ++i) { | 1950 for (size_t i = 0; i < arraysize(v4l2_scaling_matrix.scaling_list_8x8); ++i) { |
1950 for (size_t j = 0; j < arraysize(v4l2_scaling_matrix.scaling_list_8x8[i]); | 1951 for (size_t j = 0; j < arraysize(v4l2_scaling_matrix.scaling_list_8x8[i]); |
1951 ++j) { | 1952 ++j) { |
(...skipping 236 matching lines...) | |
2188 CHECK(v4l2_pic); | 2189 CHECK(v4l2_pic); |
2189 return v4l2_pic->dec_surface(); | 2190 return v4l2_pic->dec_surface(); |
2190 } | 2191 } |
2191 | 2192 |
2192 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::V4L2VP8Accelerator( | 2193 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::V4L2VP8Accelerator( |
2193 V4L2SliceVideoDecodeAccelerator* v4l2_dec) | 2194 V4L2SliceVideoDecodeAccelerator* v4l2_dec) |
2194 : v4l2_dec_(v4l2_dec) { | 2195 : v4l2_dec_(v4l2_dec) { |
2195 DCHECK(v4l2_dec_); | 2196 DCHECK(v4l2_dec_); |
2196 } | 2197 } |
2197 | 2198 |
2198 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::~V4L2VP8Accelerator() { | 2199 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::~V4L2VP8Accelerator() {} |
2199 } | |
2200 | 2200 |
2201 scoped_refptr<VP8Picture> | 2201 scoped_refptr<VP8Picture> |
2202 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::CreateVP8Picture() { | 2202 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::CreateVP8Picture() { |
2203 scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface(); | 2203 scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface(); |
2204 if (!dec_surface) | 2204 if (!dec_surface) |
2205 return nullptr; | 2205 return nullptr; |
2206 | 2206 |
2207 return new V4L2VP8Picture(dec_surface); | 2207 return new V4L2VP8Picture(dec_surface); |
2208 } | 2208 } |
2209 | 2209 |
(...skipping 61 matching lines...) | |
2271 | 2271 |
2272 static void FillV4L2EntropyHeader( | 2272 static void FillV4L2EntropyHeader( |
2273 const media::Vp8EntropyHeader& vp8_entropy_hdr, | 2273 const media::Vp8EntropyHeader& vp8_entropy_hdr, |
2274 struct v4l2_vp8_entropy_hdr* v4l2_entropy_hdr) { | 2274 struct v4l2_vp8_entropy_hdr* v4l2_entropy_hdr) { |
2275 ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->coeff_probs, | 2275 ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->coeff_probs, |
2276 vp8_entropy_hdr.coeff_probs); | 2276 vp8_entropy_hdr.coeff_probs); |
2277 ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->y_mode_probs, | 2277 ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->y_mode_probs, |
2278 vp8_entropy_hdr.y_mode_probs); | 2278 vp8_entropy_hdr.y_mode_probs); |
2279 ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->uv_mode_probs, | 2279 ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->uv_mode_probs, |
2280 vp8_entropy_hdr.uv_mode_probs); | 2280 vp8_entropy_hdr.uv_mode_probs); |
2281 ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->mv_probs, | 2281 ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->mv_probs, vp8_entropy_hdr.mv_probs); |
2282 vp8_entropy_hdr.mv_probs); | |
2283 } | 2282 } |
2284 | 2283 |
2285 bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::SubmitDecode( | 2284 bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::SubmitDecode( |
2286 const scoped_refptr<VP8Picture>& pic, | 2285 const scoped_refptr<VP8Picture>& pic, |
2287 const media::Vp8FrameHeader* frame_hdr, | 2286 const media::Vp8FrameHeader* frame_hdr, |
2288 const scoped_refptr<VP8Picture>& last_frame, | 2287 const scoped_refptr<VP8Picture>& last_frame, |
2289 const scoped_refptr<VP8Picture>& golden_frame, | 2288 const scoped_refptr<VP8Picture>& golden_frame, |
2290 const scoped_refptr<VP8Picture>& alt_frame) { | 2289 const scoped_refptr<VP8Picture>& alt_frame) { |
2291 struct v4l2_ctrl_vp8_frame_hdr v4l2_frame_hdr; | 2290 struct v4l2_ctrl_vp8_frame_hdr v4l2_frame_hdr; |
2292 memset(&v4l2_frame_hdr, 0, sizeof(v4l2_frame_hdr)); | 2291 memset(&v4l2_frame_hdr, 0, sizeof(v4l2_frame_hdr)); |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2330 | 2329 |
2331 v4l2_frame_hdr.first_part_size = | 2330 v4l2_frame_hdr.first_part_size = |
2332 base::checked_cast<__u32>(frame_hdr->first_part_size); | 2331 base::checked_cast<__u32>(frame_hdr->first_part_size); |
2333 v4l2_frame_hdr.first_part_offset = | 2332 v4l2_frame_hdr.first_part_offset = |
2334 base::checked_cast<__u32>(frame_hdr->first_part_offset); | 2333 base::checked_cast<__u32>(frame_hdr->first_part_offset); |
2335 v4l2_frame_hdr.macroblock_bit_offset = | 2334 v4l2_frame_hdr.macroblock_bit_offset = |
2336 base::checked_cast<__u32>(frame_hdr->macroblock_bit_offset); | 2335 base::checked_cast<__u32>(frame_hdr->macroblock_bit_offset); |
2337 v4l2_frame_hdr.num_dct_parts = frame_hdr->num_of_dct_partitions; | 2336 v4l2_frame_hdr.num_dct_parts = frame_hdr->num_of_dct_partitions; |
2338 | 2337 |
2339 static_assert(arraysize(v4l2_frame_hdr.dct_part_sizes) == | 2338 static_assert(arraysize(v4l2_frame_hdr.dct_part_sizes) == |
2340 arraysize(frame_hdr->dct_partition_sizes), | 2339 arraysize(frame_hdr->dct_partition_sizes), |
2341 "DCT partition size arrays must have equal number of elements"); | 2340 "DCT partition size arrays must have equal number of elements"); |
2342 for (size_t i = 0; i < frame_hdr->num_of_dct_partitions && | 2341 for (size_t i = 0; i < frame_hdr->num_of_dct_partitions && |
2343 i < arraysize(v4l2_frame_hdr.dct_part_sizes); ++i) | 2342 i < arraysize(v4l2_frame_hdr.dct_part_sizes); |
2343 ++i) | |
2344 v4l2_frame_hdr.dct_part_sizes[i] = frame_hdr->dct_partition_sizes[i]; | 2344 v4l2_frame_hdr.dct_part_sizes[i] = frame_hdr->dct_partition_sizes[i]; |
2345 | 2345 |
2346 scoped_refptr<V4L2DecodeSurface> dec_surface = | 2346 scoped_refptr<V4L2DecodeSurface> dec_surface = |
2347 VP8PictureToV4L2DecodeSurface(pic); | 2347 VP8PictureToV4L2DecodeSurface(pic); |
2348 std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces; | 2348 std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces; |
2349 | 2349 |
2350 if (last_frame) { | 2350 if (last_frame) { |
2351 scoped_refptr<V4L2DecodeSurface> last_frame_surface = | 2351 scoped_refptr<V4L2DecodeSurface> last_frame_surface = |
2352 VP8PictureToV4L2DecodeSurface(last_frame); | 2352 VP8PictureToV4L2DecodeSurface(last_frame); |
2353 v4l2_frame_hdr.last_frame = last_frame_surface->output_record(); | 2353 v4l2_frame_hdr.last_frame = last_frame_surface->output_record(); |
(...skipping 93 matching lines...) | |
2447 } | 2447 } |
2448 | 2448 |
2449 void V4L2SliceVideoDecodeAccelerator::OutputSurface( | 2449 void V4L2SliceVideoDecodeAccelerator::OutputSurface( |
2450 const scoped_refptr<V4L2DecodeSurface>& dec_surface) { | 2450 const scoped_refptr<V4L2DecodeSurface>& dec_surface) { |
2451 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2451 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2452 | 2452 |
2453 OutputRecord& output_record = | 2453 OutputRecord& output_record = |
2454 output_buffer_map_[dec_surface->output_record()]; | 2454 output_buffer_map_[dec_surface->output_record()]; |
2455 | 2455 |
2456 bool inserted = | 2456 bool inserted = |
2457 surfaces_at_display_.insert(std::make_pair(output_record.picture_id, | 2457 surfaces_at_display_ |
2458 dec_surface)).second; | 2458 .insert(std::make_pair(output_record.picture_id, dec_surface)) |
2459 .second; | |
2459 DCHECK(inserted); | 2460 DCHECK(inserted); |
2460 | 2461 |
2461 DCHECK(!output_record.at_client); | 2462 DCHECK(!output_record.at_client); |
2462 DCHECK(!output_record.at_device); | 2463 DCHECK(!output_record.at_device); |
2463 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR); | 2464 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR); |
2464 DCHECK_NE(output_record.picture_id, -1); | 2465 DCHECK_NE(output_record.picture_id, -1); |
2465 output_record.at_client = true; | 2466 output_record.at_client = true; |
2466 | 2467 |
2467 // TODO(posciak): Use visible size from decoder here instead | 2468 // TODO(posciak): Use visible size from decoder here instead |
2468 // (crbug.com/402760). Passing (0, 0) results in the client using the | 2469 // (crbug.com/402760). Passing (0, 0) results in the client using the |
(...skipping 101 matching lines...) | |
2570 media::VideoDecodeAccelerator::SupportedProfiles | 2571 media::VideoDecodeAccelerator::SupportedProfiles |
2571 V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles() { | 2572 V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles() { |
2572 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder); | 2573 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder); |
2573 if (!device) | 2574 if (!device) |
2574 return SupportedProfiles(); | 2575 return SupportedProfiles(); |
2575 | 2576 |
2576 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_), | 2577 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_), |
2577 supported_input_fourccs_); | 2578 supported_input_fourccs_); |
2578 } | 2579 } |
2579 | 2580 |
2580 } // namespace content | 2581 } // namespace media |