Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(412)

Side by Side Diff: media/gpu/v4l2_video_decode_accelerator.cc

Issue 2896193002: v4l2_vda: Adjust the DVLOG levels. (Closed)
Patch Set: Change DVLOG to VLOG for level <= 2 Created 3 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2014 The Chromium Authors. All rights reserved. 1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/gpu/v4l2_video_decode_accelerator.h" 5 #include "media/gpu/v4l2_video_decode_accelerator.h"
6 6
7 #include <dlfcn.h> 7 #include <dlfcn.h>
8 #include <errno.h> 8 #include <errno.h>
9 #include <fcntl.h> 9 #include <fcntl.h>
10 #include <linux/videodev2.h> 10 #include <linux/videodev2.h>
(...skipping 15 matching lines...) Expand all
26 #include "media/base/media_switches.h" 26 #include "media/base/media_switches.h"
27 #include "media/filters/h264_parser.h" 27 #include "media/filters/h264_parser.h"
28 #include "media/gpu/shared_memory_region.h" 28 #include "media/gpu/shared_memory_region.h"
29 #include "ui/gfx/geometry/rect.h" 29 #include "ui/gfx/geometry/rect.h"
30 #include "ui/gl/gl_context.h" 30 #include "ui/gl/gl_context.h"
31 #include "ui/gl/scoped_binders.h" 31 #include "ui/gl/scoped_binders.h"
32 32
33 #define LOGF(level) LOG(level) << __func__ << "(): " 33 #define LOGF(level) LOG(level) << __func__ << "(): "
34 #define DLOGF(level) DLOG(level) << __func__ << "(): " 34 #define DLOGF(level) DLOG(level) << __func__ << "(): "
35 #define DVLOGF(level) DVLOG(level) << __func__ << "(): " 35 #define DVLOGF(level) DVLOG(level) << __func__ << "(): "
36 #define VLOGF(level) VLOG(level) << __func__ << "(): "
36 #define PLOGF(level) PLOG(level) << __func__ << "(): " 37 #define PLOGF(level) PLOG(level) << __func__ << "(): "
37 38
38 #define NOTIFY_ERROR(x) \ 39 #define NOTIFY_ERROR(x) \
39 do { \ 40 do { \
40 LOGF(ERROR) << "Setting error state:" << x; \ 41 LOGF(ERROR) << "Setting error state:" << x; \
41 SetErrorState(x); \ 42 SetErrorState(x); \
42 } while (0) 43 } while (0)
43 44
44 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_str) \ 45 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_str) \
45 do { \ 46 do { \
(...skipping 147 matching lines...) Expand 10 before | Expand all | Expand 10 after
193 DCHECK(!device_poll_thread_.IsRunning()); 194 DCHECK(!device_poll_thread_.IsRunning());
194 195
195 // These maps have members that should be manually destroyed, e.g. file 196 // These maps have members that should be manually destroyed, e.g. file
196 // descriptors, mmap() segments, etc. 197 // descriptors, mmap() segments, etc.
197 DCHECK(input_buffer_map_.empty()); 198 DCHECK(input_buffer_map_.empty());
198 DCHECK(output_buffer_map_.empty()); 199 DCHECK(output_buffer_map_.empty());
199 } 200 }
200 201
201 bool V4L2VideoDecodeAccelerator::Initialize(const Config& config, 202 bool V4L2VideoDecodeAccelerator::Initialize(const Config& config,
202 Client* client) { 203 Client* client) {
203 DVLOGF(3) << "profile: " << config.profile 204 VLOGF(2) << "profile: " << config.profile
204 << ", output_mode=" << static_cast<int>(config.output_mode); 205 << ", output_mode=" << static_cast<int>(config.output_mode);
205 DCHECK(child_task_runner_->BelongsToCurrentThread()); 206 DCHECK(child_task_runner_->BelongsToCurrentThread());
206 DCHECK_EQ(decoder_state_, kUninitialized); 207 DCHECK_EQ(decoder_state_, kUninitialized);
207 208
208 if (config.is_encrypted()) { 209 if (config.is_encrypted()) {
209 NOTREACHED() << "Encrypted streams are not supported for this VDA"; 210 NOTREACHED() << "Encrypted streams are not supported for this VDA";
210 return false; 211 return false;
211 } 212 }
212 213
213 if (config.output_mode != Config::OutputMode::ALLOCATE && 214 if (config.output_mode != Config::OutputMode::ALLOCATE &&
214 config.output_mode != Config::OutputMode::IMPORT) { 215 config.output_mode != Config::OutputMode::IMPORT) {
(...skipping 27 matching lines...) Expand all
242 } 243 }
243 244
244 // TODO(posciak): crbug.com/450898. 245 // TODO(posciak): crbug.com/450898.
245 #if defined(ARCH_CPU_ARMEL) 246 #if defined(ARCH_CPU_ARMEL)
246 if (!gl::g_driver_egl.ext.b_EGL_KHR_fence_sync) { 247 if (!gl::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
247 LOGF(ERROR) << "context does not have EGL_KHR_fence_sync"; 248 LOGF(ERROR) << "context does not have EGL_KHR_fence_sync";
248 return false; 249 return false;
249 } 250 }
250 #endif 251 #endif
251 } else { 252 } else {
252 DVLOGF(1) << "No GL callbacks provided, initializing without GL support"; 253 VLOGF(2) << "No GL callbacks provided, initializing without GL support";
253 } 254 }
254 255
255 input_format_fourcc_ = 256 input_format_fourcc_ =
256 V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_, false); 257 V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_, false);
257 258
258 if (!device_->Open(V4L2Device::Type::kDecoder, input_format_fourcc_)) { 259 if (!device_->Open(V4L2Device::Type::kDecoder, input_format_fourcc_)) {
259 DVLOGF(1) << "Failed to open device for profile: " << config.profile 260 VLOGF(1) << "Failed to open device for profile: " << config.profile
260 << " fourcc: " << std::hex << "0x" << input_format_fourcc_; 261 << " fourcc: " << std::hex << "0x" << input_format_fourcc_;
261 return false; 262 return false;
262 } 263 }
263 264
264 // Capabilities check. 265 // Capabilities check.
265 struct v4l2_capability caps; 266 struct v4l2_capability caps;
266 const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING; 267 const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING;
267 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps); 268 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
268 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { 269 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
269 LOGF(ERROR) << "ioctl() failed: VIDIOC_QUERYCAP" 270 LOGF(ERROR) << "ioctl() failed: VIDIOC_QUERYCAP"
270 << ", caps check failed: 0x" << std::hex << caps.capabilities; 271 << ", caps check failed: 0x" << std::hex << caps.capabilities;
(...skipping 17 matching lines...) Expand all
288 289
289 // InitializeTask will NOTIFY_ERROR on failure. 290 // InitializeTask will NOTIFY_ERROR on failure.
290 decoder_thread_.task_runner()->PostTask( 291 decoder_thread_.task_runner()->PostTask(
291 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::InitializeTask, 292 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::InitializeTask,
292 base::Unretained(this))); 293 base::Unretained(this)));
293 294
294 return true; 295 return true;
295 } 296 }
296 297
297 void V4L2VideoDecodeAccelerator::InitializeTask() { 298 void V4L2VideoDecodeAccelerator::InitializeTask() {
298 DVLOGF(3); 299 VLOGF(2);
299 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 300 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
300 DCHECK_EQ(decoder_state_, kInitialized); 301 DCHECK_EQ(decoder_state_, kInitialized);
301 302
302 // Subscribe to the resolution change event. 303 // Subscribe to the resolution change event.
303 struct v4l2_event_subscription sub; 304 struct v4l2_event_subscription sub;
304 memset(&sub, 0, sizeof(sub)); 305 memset(&sub, 0, sizeof(sub));
305 sub.type = V4L2_EVENT_SOURCE_CHANGE; 306 sub.type = V4L2_EVENT_SOURCE_CHANGE;
306 IOCTL_OR_ERROR_RETURN(VIDIOC_SUBSCRIBE_EVENT, &sub); 307 IOCTL_OR_ERROR_RETURN(VIDIOC_SUBSCRIBE_EVENT, &sub);
307 308
308 if (!CreateInputBuffers()) { 309 if (!CreateInputBuffers()) {
309 NOTIFY_ERROR(PLATFORM_FAILURE); 310 NOTIFY_ERROR(PLATFORM_FAILURE);
310 return; 311 return;
311 } 312 }
312 313
313 decoder_cmd_supported_ = IsDecoderCmdSupported(); 314 decoder_cmd_supported_ = IsDecoderCmdSupported();
314 315
315 if (!StartDevicePoll()) 316 if (!StartDevicePoll())
316 return; 317 return;
317 } 318 }
318 319
319 void V4L2VideoDecodeAccelerator::Decode( 320 void V4L2VideoDecodeAccelerator::Decode(
320 const BitstreamBuffer& bitstream_buffer) { 321 const BitstreamBuffer& bitstream_buffer) {
321 DVLOGF(1) << "input_id=" << bitstream_buffer.id() 322 DVLOGF(4) << "input_id=" << bitstream_buffer.id()
322 << ", size=" << bitstream_buffer.size(); 323 << ", size=" << bitstream_buffer.size();
323 DCHECK(decode_task_runner_->BelongsToCurrentThread()); 324 DCHECK(decode_task_runner_->BelongsToCurrentThread());
324 325
325 if (bitstream_buffer.id() < 0) { 326 if (bitstream_buffer.id() < 0) {
326 LOGF(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id(); 327 LOGF(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id();
327 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) 328 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle()))
328 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); 329 base::SharedMemory::CloseHandle(bitstream_buffer.handle());
329 NOTIFY_ERROR(INVALID_ARGUMENT); 330 NOTIFY_ERROR(INVALID_ARGUMENT);
330 return; 331 return;
331 } 332 }
332 333
333 // DecodeTask() will take care of running a DecodeBufferTask(). 334 // DecodeTask() will take care of running a DecodeBufferTask().
334 decoder_thread_.task_runner()->PostTask( 335 decoder_thread_.task_runner()->PostTask(
335 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DecodeTask, 336 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DecodeTask,
336 base::Unretained(this), bitstream_buffer)); 337 base::Unretained(this), bitstream_buffer));
337 } 338 }
338 339
339 void V4L2VideoDecodeAccelerator::AssignPictureBuffers( 340 void V4L2VideoDecodeAccelerator::AssignPictureBuffers(
340 const std::vector<PictureBuffer>& buffers) { 341 const std::vector<PictureBuffer>& buffers) {
341 DVLOGF(3) << "buffer_count=" << buffers.size(); 342 VLOGF(2) << "buffer_count=" << buffers.size();
342 DCHECK(child_task_runner_->BelongsToCurrentThread()); 343 DCHECK(child_task_runner_->BelongsToCurrentThread());
343 344
344 decoder_thread_.task_runner()->PostTask( 345 decoder_thread_.task_runner()->PostTask(
345 FROM_HERE, 346 FROM_HERE,
346 base::Bind(&V4L2VideoDecodeAccelerator::AssignPictureBuffersTask, 347 base::Bind(&V4L2VideoDecodeAccelerator::AssignPictureBuffersTask,
347 base::Unretained(this), buffers)); 348 base::Unretained(this), buffers));
348 } 349 }
349 350
350 void V4L2VideoDecodeAccelerator::AssignPictureBuffersTask( 351 void V4L2VideoDecodeAccelerator::AssignPictureBuffersTask(
351 const std::vector<PictureBuffer>& buffers) { 352 const std::vector<PictureBuffer>& buffers) {
352 DVLOGF(3); 353 VLOGF(2);
353 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 354 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
354 DCHECK_EQ(decoder_state_, kAwaitingPictureBuffers); 355 DCHECK_EQ(decoder_state_, kAwaitingPictureBuffers);
355 356
356 uint32_t req_buffer_count = output_dpb_size_ + kDpbOutputBufferExtraCount; 357 uint32_t req_buffer_count = output_dpb_size_ + kDpbOutputBufferExtraCount;
357 if (image_processor_device_) 358 if (image_processor_device_)
358 req_buffer_count += kDpbOutputBufferExtraCountForImageProcessor; 359 req_buffer_count += kDpbOutputBufferExtraCountForImageProcessor;
359 360
360 if (buffers.size() < req_buffer_count) { 361 if (buffers.size() < req_buffer_count) {
361 LOGF(ERROR) << "Failed to provide requested picture buffers. (Got " 362 LOGF(ERROR) << "Failed to provide requested picture buffers. (Got "
362 << buffers.size() << ", requested " << req_buffer_count << ")"; 363 << buffers.size() << ", requested " << req_buffer_count << ")";
(...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after
432 ImportBufferForPictureTask( 433 ImportBufferForPictureTask(
433 output_record.picture_id, std::move(dmabuf_fds), 434 output_record.picture_id, std::move(dmabuf_fds),
434 egl_image_size_.width() * plane_horiz_bits_per_pixel / 8); 435 egl_image_size_.width() * plane_horiz_bits_per_pixel / 8);
435 } // else we'll get triggered via ImportBufferForPicture() from client. 436 } // else we'll get triggered via ImportBufferForPicture() from client.
436 437
437 DVLOGF(3) << "buffer[" << i << "]: picture_id=" << output_record.picture_id; 438 DVLOGF(3) << "buffer[" << i << "]: picture_id=" << output_record.picture_id;
438 } 439 }
439 440
440 if (output_mode_ == Config::OutputMode::ALLOCATE) { 441 if (output_mode_ == Config::OutputMode::ALLOCATE) {
441 DCHECK_EQ(kAwaitingPictureBuffers, decoder_state_); 442 DCHECK_EQ(kAwaitingPictureBuffers, decoder_state_);
442 DVLOGF(1) << "Change state to kDecoding"; 443 DVLOGF(3) << "Change state to kDecoding";
443 decoder_state_ = kDecoding; 444 decoder_state_ = kDecoding;
444 if (reset_pending_) { 445 if (reset_pending_) {
445 FinishReset(); 446 FinishReset();
446 return; 447 return;
447 } 448 }
448 ScheduleDecodeBufferTaskIfNeeded(); 449 ScheduleDecodeBufferTaskIfNeeded();
449 } 450 }
450 } 451 }
451 452
452 void V4L2VideoDecodeAccelerator::CreateEGLImageFor( 453 void V4L2VideoDecodeAccelerator::CreateEGLImageFor(
(...skipping 150 matching lines...) Expand 10 before | Expand all | Expand 10 after
603 NOTIFY_ERROR(INVALID_ARGUMENT); 604 NOTIFY_ERROR(INVALID_ARGUMENT);
604 return; 605 return;
605 } 606 }
606 int adjusted_coded_width = stride * 8 / plane_horiz_bits_per_pixel; 607 int adjusted_coded_width = stride * 8 / plane_horiz_bits_per_pixel;
607 608
608 if (image_processor_device_ && !image_processor_) { 609 if (image_processor_device_ && !image_processor_) {
609 // This is the first buffer import. Create the image processor and change 610 // This is the first buffer import. Create the image processor and change
610 // the decoder state. The client may adjust the coded width. We don't have 611 // the decoder state. The client may adjust the coded width. We don't have
611 // the final coded size in AssignPictureBuffers yet. Use the adjusted coded 612 // the final coded size in AssignPictureBuffers yet. Use the adjusted coded
612 // width to create the image processor. 613 // width to create the image processor.
613 DVLOGF(3) << "Original egl_image_size=" << egl_image_size_.ToString() 614 VLOGF(2) << "Original egl_image_size=" << egl_image_size_.ToString()
614 << ", adjusted coded width=" << adjusted_coded_width; 615 << ", adjusted coded width=" << adjusted_coded_width;
615 DCHECK_GE(adjusted_coded_width, egl_image_size_.width()); 616 DCHECK_GE(adjusted_coded_width, egl_image_size_.width());
616 egl_image_size_.set_width(adjusted_coded_width); 617 egl_image_size_.set_width(adjusted_coded_width);
617 if (!CreateImageProcessor()) 618 if (!CreateImageProcessor())
618 return; 619 return;
619 DCHECK_EQ(kAwaitingPictureBuffers, decoder_state_); 620 DCHECK_EQ(kAwaitingPictureBuffers, decoder_state_);
620 DVLOGF(1) << "Change state to kDecoding"; 621 VLOGF(2) << "Change state to kDecoding";
621 decoder_state_ = kDecoding; 622 decoder_state_ = kDecoding;
622 if (reset_pending_) { 623 if (reset_pending_) {
623 FinishReset(); 624 FinishReset();
624 } 625 }
625 } else { 626 } else {
626 DCHECK_EQ(egl_image_size_.width(), adjusted_coded_width); 627 DCHECK_EQ(egl_image_size_.width(), adjusted_coded_width);
627 } 628 }
628 629
629 size_t index = iter - output_buffer_map_.begin(); 630 size_t index = iter - output_buffer_map_.begin();
630 DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(), 631 DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
(...skipping 20 matching lines...) Expand all
651 iter->processor_output_fds.swap(dmabuf_fds); 652 iter->processor_output_fds.swap(dmabuf_fds);
652 free_output_buffers_.push_back(index); 653 free_output_buffers_.push_back(index);
653 if (decoder_state_ != kChangingResolution) { 654 if (decoder_state_ != kChangingResolution) {
654 Enqueue(); 655 Enqueue();
655 ScheduleDecodeBufferTaskIfNeeded(); 656 ScheduleDecodeBufferTaskIfNeeded();
656 } 657 }
657 } 658 }
658 } 659 }
659 660
660 void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_buffer_id) { 661 void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_buffer_id) {
661 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id; 662 DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id;
662 // Must be run on child thread, as we'll insert a sync in the EGL context. 663 // Must be run on child thread, as we'll insert a sync in the EGL context.
663 DCHECK(child_task_runner_->BelongsToCurrentThread()); 664 DCHECK(child_task_runner_->BelongsToCurrentThread());
664 665
665 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref; 666 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref;
666 667
667 if (!make_context_current_cb_.is_null()) { 668 if (!make_context_current_cb_.is_null()) {
668 if (!make_context_current_cb_.Run()) { 669 if (!make_context_current_cb_.Run()) {
669 LOGF(ERROR) << "could not make context current"; 670 LOGF(ERROR) << "could not make context current";
670 NOTIFY_ERROR(PLATFORM_FAILURE); 671 NOTIFY_ERROR(PLATFORM_FAILURE);
671 return; 672 return;
(...skipping 13 matching lines...) Expand all
685 egl_sync_ref.reset(new EGLSyncKHRRef(egl_display_, egl_sync)); 686 egl_sync_ref.reset(new EGLSyncKHRRef(egl_display_, egl_sync));
686 } 687 }
687 688
688 decoder_thread_.task_runner()->PostTask( 689 decoder_thread_.task_runner()->PostTask(
689 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ReusePictureBufferTask, 690 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ReusePictureBufferTask,
690 base::Unretained(this), picture_buffer_id, 691 base::Unretained(this), picture_buffer_id,
691 base::Passed(&egl_sync_ref))); 692 base::Passed(&egl_sync_ref)));
692 } 693 }
693 694
694 void V4L2VideoDecodeAccelerator::Flush() { 695 void V4L2VideoDecodeAccelerator::Flush() {
695 DVLOGF(3); 696 VLOGF(2);
696 DCHECK(child_task_runner_->BelongsToCurrentThread()); 697 DCHECK(child_task_runner_->BelongsToCurrentThread());
697 decoder_thread_.task_runner()->PostTask( 698 decoder_thread_.task_runner()->PostTask(
698 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::FlushTask, 699 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::FlushTask,
699 base::Unretained(this))); 700 base::Unretained(this)));
700 } 701 }
701 702
702 void V4L2VideoDecodeAccelerator::Reset() { 703 void V4L2VideoDecodeAccelerator::Reset() {
703 DVLOGF(3); 704 VLOGF(2);
704 DCHECK(child_task_runner_->BelongsToCurrentThread()); 705 DCHECK(child_task_runner_->BelongsToCurrentThread());
705 decoder_thread_.task_runner()->PostTask( 706 decoder_thread_.task_runner()->PostTask(
706 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ResetTask, 707 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ResetTask,
707 base::Unretained(this))); 708 base::Unretained(this)));
708 } 709 }
709 710
710 void V4L2VideoDecodeAccelerator::Destroy() { 711 void V4L2VideoDecodeAccelerator::Destroy() {
711 DVLOGF(3); 712 VLOGF(2);
712 DCHECK(child_task_runner_->BelongsToCurrentThread()); 713 DCHECK(child_task_runner_->BelongsToCurrentThread());
713 714
714 // We're destroying; cancel all callbacks. 715 // We're destroying; cancel all callbacks.
715 client_ptr_factory_.reset(); 716 client_ptr_factory_.reset();
716 weak_this_factory_.InvalidateWeakPtrs(); 717 weak_this_factory_.InvalidateWeakPtrs();
717 718
718 // If the decoder thread is running, destroy using posted task. 719 // If the decoder thread is running, destroy using posted task.
719 if (decoder_thread_.IsRunning()) { 720 if (decoder_thread_.IsRunning()) {
720 decoder_thread_.task_runner()->PostTask( 721 decoder_thread_.task_runner()->PostTask(
721 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DestroyTask, 722 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DestroyTask,
722 base::Unretained(this))); 723 base::Unretained(this)));
723 // DestroyTask() will cause the decoder_thread_ to flush all tasks. 724 // DestroyTask() will cause the decoder_thread_ to flush all tasks.
724 decoder_thread_.Stop(); 725 decoder_thread_.Stop();
725 } else { 726 } else {
726 // Otherwise, call the destroy task directly. 727 // Otherwise, call the destroy task directly.
727 DestroyTask(); 728 DestroyTask();
728 } 729 }
729 730
730 delete this; 731 delete this;
731 } 732 }
732 733
733 bool V4L2VideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread( 734 bool V4L2VideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread(
734 const base::WeakPtr<Client>& decode_client, 735 const base::WeakPtr<Client>& decode_client,
735 const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) { 736 const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) {
736 DVLOGF(2); 737 VLOGF(2);
737 decode_client_ = decode_client; 738 decode_client_ = decode_client;
738 decode_task_runner_ = decode_task_runner; 739 decode_task_runner_ = decode_task_runner;
739 return true; 740 return true;
740 } 741 }
741 742
742 // static 743 // static
743 VideoDecodeAccelerator::SupportedProfiles 744 VideoDecodeAccelerator::SupportedProfiles
744 V4L2VideoDecodeAccelerator::GetSupportedProfiles() { 745 V4L2VideoDecodeAccelerator::GetSupportedProfiles() {
745 scoped_refptr<V4L2Device> device = V4L2Device::Create(); 746 scoped_refptr<V4L2Device> device = V4L2Device::Create();
746 if (!device) 747 if (!device)
747 return SupportedProfiles(); 748 return SupportedProfiles();
748 749
749 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_), 750 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_),
750 supported_input_fourccs_); 751 supported_input_fourccs_);
751 } 752 }
752 753
753 void V4L2VideoDecodeAccelerator::DecodeTask( 754 void V4L2VideoDecodeAccelerator::DecodeTask(
754 const BitstreamBuffer& bitstream_buffer) { 755 const BitstreamBuffer& bitstream_buffer) {
755 DVLOGF(3) << "input_id=" << bitstream_buffer.id(); 756 DVLOGF(4) << "input_id=" << bitstream_buffer.id();
756 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 757 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
757 DCHECK_NE(decoder_state_, kUninitialized); 758 DCHECK_NE(decoder_state_, kUninitialized);
758 TRACE_EVENT1("Video Decoder", "V4L2VDA::DecodeTask", "input_id", 759 TRACE_EVENT1("Video Decoder", "V4L2VDA::DecodeTask", "input_id",
759 bitstream_buffer.id()); 760 bitstream_buffer.id());
760 761
761 std::unique_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef( 762 std::unique_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
762 decode_client_, decode_task_runner_, 763 decode_client_, decode_task_runner_,
763 std::unique_ptr<SharedMemoryRegion>( 764 std::unique_ptr<SharedMemoryRegion>(
764 new SharedMemoryRegion(bitstream_buffer, true)), 765 new SharedMemoryRegion(bitstream_buffer, true)),
765 bitstream_buffer.id())); 766 bitstream_buffer.id()));
766 767
767 // Skip empty buffer. 768 // Skip empty buffer.
768 if (bitstream_buffer.size() == 0) 769 if (bitstream_buffer.size() == 0)
769 return; 770 return;
770 771
771 if (!bitstream_record->shm->Map()) { 772 if (!bitstream_record->shm->Map()) {
772 LOGF(ERROR) << "could not map bitstream_buffer"; 773 LOGF(ERROR) << "could not map bitstream_buffer";
773 NOTIFY_ERROR(UNREADABLE_INPUT); 774 NOTIFY_ERROR(UNREADABLE_INPUT);
774 return; 775 return;
775 } 776 }
776 DVLOGF(3) << "mapped at=" << bitstream_record->shm->memory(); 777 DVLOGF(4) << "mapped at=" << bitstream_record->shm->memory();
777 778
778 if (decoder_state_ == kResetting || decoder_flushing_) { 779 if (decoder_state_ == kResetting || decoder_flushing_) {
779 // In the case that we're resetting or flushing, we need to delay decoding 780 // In the case that we're resetting or flushing, we need to delay decoding
780 // the BitstreamBuffers that come after the Reset() or Flush() call. When 781 // the BitstreamBuffers that come after the Reset() or Flush() call. When
781 // we're here, we know that this DecodeTask() was scheduled by a Decode() 782 // we're here, we know that this DecodeTask() was scheduled by a Decode()
782 // call that came after (in the client thread) the Reset() or Flush() call; 783 // call that came after (in the client thread) the Reset() or Flush() call;
783 // thus set up the delay if necessary. 784 // thus set up the delay if necessary.
784 if (decoder_delay_bitstream_buffer_id_ == -1) 785 if (decoder_delay_bitstream_buffer_id_ == -1)
785 decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id; 786 decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
786 } else if (decoder_state_ == kError) { 787 } else if (decoder_state_ == kError) {
787 DVLOGF(2) << "early out: kError state"; 788 VLOGF(2) << "early out: kError state";
788 return; 789 return;
789 } 790 }
790 791
791 decoder_input_queue_.push( 792 decoder_input_queue_.push(
792 linked_ptr<BitstreamBufferRef>(bitstream_record.release())); 793 linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
793 decoder_decode_buffer_tasks_scheduled_++; 794 decoder_decode_buffer_tasks_scheduled_++;
794 DecodeBufferTask(); 795 DecodeBufferTask();
795 } 796 }
796 797
797 void V4L2VideoDecodeAccelerator::DecodeBufferTask() { 798 void V4L2VideoDecodeAccelerator::DecodeBufferTask() {
798 DVLOGF(3); 799 DVLOGF(4);
799 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 800 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
800 DCHECK_NE(decoder_state_, kUninitialized); 801 DCHECK_NE(decoder_state_, kUninitialized);
801 TRACE_EVENT0("Video Decoder", "V4L2VDA::DecodeBufferTask"); 802 TRACE_EVENT0("Video Decoder", "V4L2VDA::DecodeBufferTask");
802 803
803 decoder_decode_buffer_tasks_scheduled_--; 804 decoder_decode_buffer_tasks_scheduled_--;
804 805
805 if (decoder_state_ != kInitialized && decoder_state_ != kDecoding) { 806 if (decoder_state_ != kInitialized && decoder_state_ != kDecoding) {
806 DVLOGF(2) << "early out: state=" << decoder_state_; 807 VLOGF(2) << "early out: state=" << decoder_state_;
807 return; 808 return;
808 } 809 }
809 810
810 if (decoder_current_bitstream_buffer_ == NULL) { 811 if (decoder_current_bitstream_buffer_ == NULL) {
811 if (decoder_input_queue_.empty()) { 812 if (decoder_input_queue_.empty()) {
812 // We're waiting for a new buffer -- exit without scheduling a new task. 813 // We're waiting for a new buffer -- exit without scheduling a new task.
813 return; 814 return;
814 } 815 }
815 linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front(); 816 linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
816 if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) { 817 if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
817 // We're asked to delay decoding on this and subsequent buffers. 818 // We're asked to delay decoding on this and subsequent buffers.
818 return; 819 return;
819 } 820 }
820 821
821 // Setup to use the next buffer. 822 // Setup to use the next buffer.
822 decoder_current_bitstream_buffer_.reset(buffer_ref.release()); 823 decoder_current_bitstream_buffer_.reset(buffer_ref.release());
823 decoder_input_queue_.pop(); 824 decoder_input_queue_.pop();
824 const auto& shm = decoder_current_bitstream_buffer_->shm; 825 const auto& shm = decoder_current_bitstream_buffer_->shm;
825 if (shm) { 826 if (shm) {
826 DVLOGF(3) << "reading input_id=" 827 DVLOGF(4) << "reading input_id="
827 << decoder_current_bitstream_buffer_->input_id 828 << decoder_current_bitstream_buffer_->input_id
828 << ", addr=" << shm->memory() << ", size=" << shm->size(); 829 << ", addr=" << shm->memory() << ", size=" << shm->size();
829 } else { 830 } else {
830 DCHECK_EQ(decoder_current_bitstream_buffer_->input_id, kFlushBufferId); 831 DCHECK_EQ(decoder_current_bitstream_buffer_->input_id, kFlushBufferId);
831 DVLOGF(3) << "reading input_id=kFlushBufferId"; 832 DVLOGF(4) << "reading input_id=kFlushBufferId";
832 } 833 }
833 } 834 }
834 bool schedule_task = false; 835 bool schedule_task = false;
835 size_t decoded_size = 0; 836 size_t decoded_size = 0;
836 const auto& shm = decoder_current_bitstream_buffer_->shm; 837 const auto& shm = decoder_current_bitstream_buffer_->shm;
837 if (!shm) { 838 if (!shm) {
838 // This is a dummy buffer, queued to flush the pipe. Flush. 839 // This is a dummy buffer, queued to flush the pipe. Flush.
839 DCHECK_EQ(decoder_current_bitstream_buffer_->input_id, kFlushBufferId); 840 DCHECK_EQ(decoder_current_bitstream_buffer_->input_id, kFlushBufferId);
840 // Enqueue a buffer guaranteed to be empty. To do that, we flush the 841 // Enqueue a buffer guaranteed to be empty. To do that, we flush the
841 // current input, enqueue no data to the next frame, then flush that down. 842 // current input, enqueue no data to the next frame, then flush that down.
842 schedule_task = true; 843 schedule_task = true;
843 if (decoder_current_input_buffer_ != -1 && 844 if (decoder_current_input_buffer_ != -1 &&
844 input_buffer_map_[decoder_current_input_buffer_].input_id != 845 input_buffer_map_[decoder_current_input_buffer_].input_id !=
845 kFlushBufferId) 846 kFlushBufferId)
846 schedule_task = FlushInputFrame(); 847 schedule_task = FlushInputFrame();
847 848
848 if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) { 849 if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) {
849 DVLOGF(2) << "enqueued flush buffer"; 850 VLOGF(2) << "enqueued flush buffer";
850 decoder_partial_frame_pending_ = false; 851 decoder_partial_frame_pending_ = false;
851 schedule_task = true; 852 schedule_task = true;
852 } else { 853 } else {
853 // If we failed to enqueue the empty buffer (due to pipeline 854 // If we failed to enqueue the empty buffer (due to pipeline
854 // backpressure), don't advance the bitstream buffer queue, and don't 855 // backpressure), don't advance the bitstream buffer queue, and don't
855 // schedule the next task. This bitstream buffer queue entry will get 856 // schedule the next task. This bitstream buffer queue entry will get
856 // reprocessed when the pipeline frees up. 857 // reprocessed when the pipeline frees up.
857 schedule_task = false; 858 schedule_task = false;
858 } 859 }
859 } else if (shm->size() == 0) { 860 } else if (shm->size() == 0) {
(...skipping 30 matching lines...) Expand all
890 // Failed during decode. 891 // Failed during decode.
891 return; 892 return;
892 } 893 }
893 894
894 if (schedule_task) { 895 if (schedule_task) {
895 decoder_current_bitstream_buffer_->bytes_used += decoded_size; 896 decoder_current_bitstream_buffer_->bytes_used += decoded_size;
896 if ((shm ? shm->size() : 0) == 897 if ((shm ? shm->size() : 0) ==
897 decoder_current_bitstream_buffer_->bytes_used) { 898 decoder_current_bitstream_buffer_->bytes_used) {
898 // Our current bitstream buffer is done; return it. 899 // Our current bitstream buffer is done; return it.
899 int32_t input_id = decoder_current_bitstream_buffer_->input_id; 900 int32_t input_id = decoder_current_bitstream_buffer_->input_id;
900 DVLOGF(3) << "finished input_id=" << input_id; 901 DVLOGF(4) << "finished input_id=" << input_id;
901 // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer(). 902 // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
902 decoder_current_bitstream_buffer_.reset(); 903 decoder_current_bitstream_buffer_.reset();
903 } 904 }
904 ScheduleDecodeBufferTaskIfNeeded(); 905 ScheduleDecodeBufferTaskIfNeeded();
905 } 906 }
906 } 907 }
907 908
908 bool V4L2VideoDecodeAccelerator::AdvanceFrameFragment(const uint8_t* data, 909 bool V4L2VideoDecodeAccelerator::AdvanceFrameFragment(const uint8_t* data,
909 size_t size, 910 size_t size,
910 size_t* endpos) { 911 size_t* endpos) {
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after
1002 decoder_decode_buffer_tasks_scheduled_++; 1003 decoder_decode_buffer_tasks_scheduled_++;
1003 decoder_thread_.task_runner()->PostTask( 1004 decoder_thread_.task_runner()->PostTask(
1004 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DecodeBufferTask, 1005 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DecodeBufferTask,
1005 base::Unretained(this))); 1006 base::Unretained(this)));
1006 } 1007 }
1007 } 1008 }
1008 1009
1009 bool V4L2VideoDecodeAccelerator::DecodeBufferInitial(const void* data, 1010 bool V4L2VideoDecodeAccelerator::DecodeBufferInitial(const void* data,
1010 size_t size, 1011 size_t size,
1011 size_t* endpos) { 1012 size_t* endpos) {
1012 DVLOGF(3) << "data=" << data << ", size=" << size; 1013 DVLOGF(4) << "data=" << data << ", size=" << size;
1013 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1014 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1014 DCHECK_EQ(decoder_state_, kInitialized); 1015 DCHECK_EQ(decoder_state_, kInitialized);
1015 // Initial decode. We haven't been able to get output stream format info yet. 1016 // Initial decode. We haven't been able to get output stream format info yet.
1016 // Get it, and start decoding. 1017 // Get it, and start decoding.
1017 1018
1018 // Copy in and send to HW. 1019 // Copy in and send to HW.
1019 if (!AppendToInputFrame(data, size)) 1020 if (!AppendToInputFrame(data, size))
1020 return false; 1021 return false;
1021 1022
1022 // If we only have a partial frame, don't flush and process yet. 1023 // If we only have a partial frame, don't flush and process yet.
(...skipping 15 matching lines...) Expand all
1038 1039
1039 *endpos = size; 1040 *endpos = size;
1040 1041
1041 if (again) { 1042 if (again) {
1042 // Need more stream to decode format, return true and schedule next buffer. 1043 // Need more stream to decode format, return true and schedule next buffer.
1043 return true; 1044 return true;
1044 } 1045 }
1045 1046
1046 // Run this initialization only on first startup. 1047 // Run this initialization only on first startup.
1047 if (output_buffer_map_.empty()) { 1048 if (output_buffer_map_.empty()) {
1048 DVLOGF(3) << "running initialization"; 1049 DVLOGF(4) << "running initialization";
1049 // Success! Setup our parameters. 1050 // Success! Setup our parameters.
1050 if (!CreateBuffersForFormat(format, visible_size)) 1051 if (!CreateBuffersForFormat(format, visible_size))
1051 return false; 1052 return false;
1052 // We are waiting for AssignPictureBuffers. Do not set the state to 1053 // We are waiting for AssignPictureBuffers. Do not set the state to
1053 // kDecoding. 1054 // kDecoding.
1054 } else { 1055 } else {
1055 decoder_state_ = kDecoding; 1056 decoder_state_ = kDecoding;
1056 ScheduleDecodeBufferTaskIfNeeded(); 1057 ScheduleDecodeBufferTaskIfNeeded();
1057 } 1058 }
1058 return true; 1059 return true;
1059 } 1060 }
1060 1061
1061 bool V4L2VideoDecodeAccelerator::DecodeBufferContinue(const void* data, 1062 bool V4L2VideoDecodeAccelerator::DecodeBufferContinue(const void* data,
1062 size_t size) { 1063 size_t size) {
1063 DVLOGF(3) << "data=" << data << ", size=" << size; 1064 DVLOGF(4) << "data=" << data << ", size=" << size;
1064 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1065 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1065 DCHECK_EQ(decoder_state_, kDecoding); 1066 DCHECK_EQ(decoder_state_, kDecoding);
1066 1067
1067 // Both of these calls will set kError state if they fail. 1068 // Both of these calls will set kError state if they fail.
1068 // Only flush the frame if it's complete. 1069 // Only flush the frame if it's complete.
1069 return (AppendToInputFrame(data, size) && 1070 return (AppendToInputFrame(data, size) &&
1070 (decoder_partial_frame_pending_ || FlushInputFrame())); 1071 (decoder_partial_frame_pending_ || FlushInputFrame()));
1071 } 1072 }
1072 1073
1073 bool V4L2VideoDecodeAccelerator::AppendToInputFrame(const void* data, 1074 bool V4L2VideoDecodeAccelerator::AppendToInputFrame(const void* data,
1074 size_t size) { 1075 size_t size) {
1075 DVLOGF(3); 1076 DVLOGF(4);
1076 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1077 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1077 DCHECK_NE(decoder_state_, kUninitialized); 1078 DCHECK_NE(decoder_state_, kUninitialized);
1078 DCHECK_NE(decoder_state_, kResetting); 1079 DCHECK_NE(decoder_state_, kResetting);
1079 DCHECK_NE(decoder_state_, kError); 1080 DCHECK_NE(decoder_state_, kError);
1080 // This routine can handle data == NULL and size == 0, which occurs when 1081 // This routine can handle data == NULL and size == 0, which occurs when
1081 // we queue an empty buffer for the purposes of flushing the pipe. 1082 // we queue an empty buffer for the purposes of flushing the pipe.
1082 1083
1083 // Flush if we're too big 1084 // Flush if we're too big
1084 if (decoder_current_input_buffer_ != -1) { 1085 if (decoder_current_input_buffer_ != -1) {
1085 InputRecord& input_record = 1086 InputRecord& input_record =
1086 input_buffer_map_[decoder_current_input_buffer_]; 1087 input_buffer_map_[decoder_current_input_buffer_];
1087 if (input_record.bytes_used + size > input_record.length) { 1088 if (input_record.bytes_used + size > input_record.length) {
1088 if (!FlushInputFrame()) 1089 if (!FlushInputFrame())
1089 return false; 1090 return false;
1090 decoder_current_input_buffer_ = -1; 1091 decoder_current_input_buffer_ = -1;
1091 } 1092 }
1092 } 1093 }
1093 1094
1094 // Try to get an available input buffer 1095 // Try to get an available input buffer
1095 if (decoder_current_input_buffer_ == -1) { 1096 if (decoder_current_input_buffer_ == -1) {
1096 if (free_input_buffers_.empty()) { 1097 if (free_input_buffers_.empty()) {
1097 // See if we can get more free buffers from HW 1098 // See if we can get more free buffers from HW
1098 Dequeue(); 1099 Dequeue();
1099 if (free_input_buffers_.empty()) { 1100 if (free_input_buffers_.empty()) {
1100 // Nope! 1101 // Nope!
1101 DVLOGF(2) << "stalled for input buffers"; 1102 DVLOGF(3) << "stalled for input buffers";
1102 return false; 1103 return false;
1103 } 1104 }
1104 } 1105 }
1105 decoder_current_input_buffer_ = free_input_buffers_.back(); 1106 decoder_current_input_buffer_ = free_input_buffers_.back();
1106 free_input_buffers_.pop_back(); 1107 free_input_buffers_.pop_back();
1107 InputRecord& input_record = 1108 InputRecord& input_record =
1108 input_buffer_map_[decoder_current_input_buffer_]; 1109 input_buffer_map_[decoder_current_input_buffer_];
1109 DCHECK_EQ(input_record.bytes_used, 0); 1110 DCHECK_EQ(input_record.bytes_used, 0);
1110 DCHECK_EQ(input_record.input_id, -1); 1111 DCHECK_EQ(input_record.input_id, -1);
1111 DCHECK(decoder_current_bitstream_buffer_ != NULL); 1112 DCHECK(decoder_current_bitstream_buffer_ != NULL);
(...skipping 17 matching lines...) Expand all
1129 } 1130 }
1130 memcpy(reinterpret_cast<uint8_t*>(input_record.address) + 1131 memcpy(reinterpret_cast<uint8_t*>(input_record.address) +
1131 input_record.bytes_used, 1132 input_record.bytes_used,
1132 data, size); 1133 data, size);
1133 input_record.bytes_used += size; 1134 input_record.bytes_used += size;
1134 1135
1135 return true; 1136 return true;
1136 } 1137 }
1137 1138
1138 bool V4L2VideoDecodeAccelerator::FlushInputFrame() { 1139 bool V4L2VideoDecodeAccelerator::FlushInputFrame() {
1139 DVLOGF(3); 1140 DVLOGF(4);
1140 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1141 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1141 DCHECK_NE(decoder_state_, kUninitialized); 1142 DCHECK_NE(decoder_state_, kUninitialized);
1142 DCHECK_NE(decoder_state_, kResetting); 1143 DCHECK_NE(decoder_state_, kResetting);
1143 DCHECK_NE(decoder_state_, kError); 1144 DCHECK_NE(decoder_state_, kError);
1144 1145
1145 if (decoder_current_input_buffer_ == -1) 1146 if (decoder_current_input_buffer_ == -1)
1146 return true; 1147 return true;
1147 1148
1148 InputRecord& input_record = input_buffer_map_[decoder_current_input_buffer_]; 1149 InputRecord& input_record = input_buffer_map_[decoder_current_input_buffer_];
1149 DCHECK_NE(input_record.input_id, -1); 1150 DCHECK_NE(input_record.input_id, -1);
1150 DCHECK(input_record.input_id != kFlushBufferId || 1151 DCHECK(input_record.input_id != kFlushBufferId ||
1151 input_record.bytes_used == 0); 1152 input_record.bytes_used == 0);
1152 // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we 1153 // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we
1153 // got from the client. We can skip it if it is empty. 1154 // got from the client. We can skip it if it is empty.
1154 // * if input_id < 0 (should be kFlushBufferId in this case), this input 1155 // * if input_id < 0 (should be kFlushBufferId in this case), this input
1155 // buffer was prompted by a flush buffer, and should be queued even when 1156 // buffer was prompted by a flush buffer, and should be queued even when
1156 // empty. 1157 // empty.
1157 if (input_record.input_id >= 0 && input_record.bytes_used == 0) { 1158 if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
1158 input_record.input_id = -1; 1159 input_record.input_id = -1;
1159 free_input_buffers_.push_back(decoder_current_input_buffer_); 1160 free_input_buffers_.push_back(decoder_current_input_buffer_);
1160 decoder_current_input_buffer_ = -1; 1161 decoder_current_input_buffer_ = -1;
1161 return true; 1162 return true;
1162 } 1163 }
1163 1164
1164 // Queue it. 1165 // Queue it.
1165 input_ready_queue_.push(decoder_current_input_buffer_); 1166 input_ready_queue_.push(decoder_current_input_buffer_);
1166 decoder_current_input_buffer_ = -1; 1167 decoder_current_input_buffer_ = -1;
1167 DVLOGF(3) << "submitting input_id=" << input_record.input_id; 1168 DVLOGF(4) << "submitting input_id=" << input_record.input_id;
1168 // Enqueue once since there's new available input for it. 1169 // Enqueue once since there's new available input for it.
1169 Enqueue(); 1170 Enqueue();
1170 1171
1171 return (decoder_state_ != kError); 1172 return (decoder_state_ != kError);
1172 } 1173 }
1173 1174
1174 void V4L2VideoDecodeAccelerator::ServiceDeviceTask(bool event_pending) { 1175 void V4L2VideoDecodeAccelerator::ServiceDeviceTask(bool event_pending) {
1175 DVLOGF(3); 1176 DVLOGF(3);
1176 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1177 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1177 DCHECK_NE(decoder_state_, kUninitialized); 1178 DCHECK_NE(decoder_state_, kUninitialized);
1178 TRACE_EVENT0("Video Decoder", "V4L2VDA::ServiceDeviceTask"); 1179 TRACE_EVENT0("Video Decoder", "V4L2VDA::ServiceDeviceTask");
1179 1180
1180 if (decoder_state_ == kResetting) { 1181 if (decoder_state_ == kResetting) {
1181 DVLOGF(2) << "early out: kResetting state"; 1182 DVLOGF(3) << "early out: kResetting state";
1182 return; 1183 return;
1183 } else if (decoder_state_ == kError) { 1184 } else if (decoder_state_ == kError) {
1184 DVLOGF(2) << "early out: kError state"; 1185 DVLOGF(3) << "early out: kError state";
1185 return; 1186 return;
1186 } else if (decoder_state_ == kChangingResolution) { 1187 } else if (decoder_state_ == kChangingResolution) {
1187 DVLOGF(2) << "early out: kChangingResolution state"; 1188 DVLOGF(3) << "early out: kChangingResolution state";
1188 return; 1189 return;
1189 } 1190 }
1190 1191
1191 bool resolution_change_pending = false; 1192 bool resolution_change_pending = false;
1192 if (event_pending) 1193 if (event_pending)
1193 resolution_change_pending = DequeueResolutionChangeEvent(); 1194 resolution_change_pending = DequeueResolutionChangeEvent();
1194 Dequeue(); 1195 Dequeue();
1195 Enqueue(); 1196 Enqueue();
1196 1197
1197 // Clear the interrupt fd. 1198 // Clear the interrupt fd.
(...skipping 13 matching lines...) Expand all
1211 // * device_poll_thread_ is running normally 1212 // * device_poll_thread_ is running normally
1212 // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask() 1213 // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
1213 // shut it down, in which case we're either in kResetting or kError states 1214 // shut it down, in which case we're either in kResetting or kError states
1214 // respectively, and we should have early-outed already. 1215 // respectively, and we should have early-outed already.
1215 DCHECK(device_poll_thread_.message_loop()); 1216 DCHECK(device_poll_thread_.message_loop());
1216 // Queue the DevicePollTask() now. 1217 // Queue the DevicePollTask() now.
1217 device_poll_thread_.task_runner()->PostTask( 1218 device_poll_thread_.task_runner()->PostTask(
1218 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DevicePollTask, 1219 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DevicePollTask,
1219 base::Unretained(this), poll_device)); 1220 base::Unretained(this), poll_device));
1220 1221
1221 DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC[" 1222 DVLOG(3) << "ServiceDeviceTask(): buffer counts: DEC["
1222 << decoder_input_queue_.size() << "->" 1223 << decoder_input_queue_.size() << "->"
1223 << input_ready_queue_.size() << "] => DEVICE[" 1224 << input_ready_queue_.size() << "] => DEVICE["
1224 << free_input_buffers_.size() << "+" 1225 << free_input_buffers_.size() << "+"
1225 << input_buffer_queued_count_ << "/" 1226 << input_buffer_queued_count_ << "/"
1226 << input_buffer_map_.size() << "->" 1227 << input_buffer_map_.size() << "->"
1227 << free_output_buffers_.size() << "+" 1228 << free_output_buffers_.size() << "+"
1228 << output_buffer_queued_count_ << "/" 1229 << output_buffer_queued_count_ << "/"
1229 << output_buffer_map_.size() << "] => PROCESSOR[" 1230 << output_buffer_map_.size() << "] => PROCESSOR["
1230 << image_processor_bitstream_buffer_ids_.size() << "] => CLIENT[" 1231 << image_processor_bitstream_buffer_ids_.size() << "] => CLIENT["
1231 << decoder_frames_at_client_ << "]"; 1232 << decoder_frames_at_client_ << "]";
1232 1233
1233 ScheduleDecodeBufferTaskIfNeeded(); 1234 ScheduleDecodeBufferTaskIfNeeded();
1234 if (resolution_change_pending) 1235 if (resolution_change_pending)
1235 StartResolutionChange(); 1236 StartResolutionChange();
1236 } 1237 }
1237 1238
1238 void V4L2VideoDecodeAccelerator::Enqueue() { 1239 void V4L2VideoDecodeAccelerator::Enqueue() {
1239 DVLOGF(3); 1240 DVLOGF(4);
1240 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1241 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1241 DCHECK_NE(decoder_state_, kUninitialized); 1242 DCHECK_NE(decoder_state_, kUninitialized);
1242 TRACE_EVENT0("Video Decoder", "V4L2VDA::Enqueue"); 1243 TRACE_EVENT0("Video Decoder", "V4L2VDA::Enqueue");
1243 1244
1244 // Drain the pipe of completed decode buffers. 1245 // Drain the pipe of completed decode buffers.
1245 const int old_inputs_queued = input_buffer_queued_count_; 1246 const int old_inputs_queued = input_buffer_queued_count_;
1246 while (!input_ready_queue_.empty()) { 1247 while (!input_ready_queue_.empty()) {
1247 const int buffer = input_ready_queue_.front(); 1248 const int buffer = input_ready_queue_.front();
1248 InputRecord& input_record = input_buffer_map_[buffer]; 1249 InputRecord& input_record = input_buffer_map_[buffer];
1249 if (input_record.input_id == kFlushBufferId && decoder_cmd_supported_) { 1250 if (input_record.input_id == kFlushBufferId && decoder_cmd_supported_) {
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after
1304 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 1305 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1305 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); 1306 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
1306 output_streamon_ = true; 1307 output_streamon_ = true;
1307 } 1308 }
1308 } 1309 }
1309 } 1310 }
1310 1311
1311 bool V4L2VideoDecodeAccelerator::DequeueResolutionChangeEvent() { 1312 bool V4L2VideoDecodeAccelerator::DequeueResolutionChangeEvent() {
1312 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1313 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1313 DCHECK_NE(decoder_state_, kUninitialized); 1314 DCHECK_NE(decoder_state_, kUninitialized);
1314 DVLOGF(3); 1315 VLOGF(2);
1315 1316
1316 struct v4l2_event ev; 1317 struct v4l2_event ev;
1317 memset(&ev, 0, sizeof(ev)); 1318 memset(&ev, 0, sizeof(ev));
1318 1319
1319 while (device_->Ioctl(VIDIOC_DQEVENT, &ev) == 0) { 1320 while (device_->Ioctl(VIDIOC_DQEVENT, &ev) == 0) {
1320 if (ev.type == V4L2_EVENT_SOURCE_CHANGE) { 1321 if (ev.type == V4L2_EVENT_SOURCE_CHANGE) {
1321 if (ev.u.src_change.changes & V4L2_EVENT_SRC_CH_RESOLUTION) { 1322 if (ev.u.src_change.changes & V4L2_EVENT_SRC_CH_RESOLUTION) {
1322 DVLOGF(3) << "got resolution change event."; 1323 VLOGF(2) << "got resolution change event.";
1323 return true; 1324 return true;
1324 } 1325 }
1325 } else { 1326 } else {
1326 LOGF(ERROR) << "got an event (" << ev.type 1327 LOGF(ERROR) << "got an event (" << ev.type
1327 << ") we haven't subscribed to."; 1328 << ") we haven't subscribed to.";
1328 } 1329 }
1329 } 1330 }
1330 return false; 1331 return false;
1331 } 1332 }
1332 1333
1333 void V4L2VideoDecodeAccelerator::Dequeue() { 1334 void V4L2VideoDecodeAccelerator::Dequeue() {
1334 DVLOGF(3); 1335 DVLOGF(4);
1335 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1336 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1336 DCHECK_NE(decoder_state_, kUninitialized); 1337 DCHECK_NE(decoder_state_, kUninitialized);
1337 TRACE_EVENT0("Video Decoder", "V4L2VDA::Dequeue"); 1338 TRACE_EVENT0("Video Decoder", "V4L2VDA::Dequeue");
1338 1339
1339 while (input_buffer_queued_count_ > 0) { 1340 while (input_buffer_queued_count_ > 0) {
1340 if (!DequeueInputBuffer()) 1341 if (!DequeueInputBuffer())
1341 break; 1342 break;
1342 } 1343 }
1343 while (output_buffer_queued_count_ > 0) { 1344 while (output_buffer_queued_count_ > 0) {
1344 if (!DequeueOutputBuffer()) 1345 if (!DequeueOutputBuffer())
(...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after
1414 DCHECK_EQ(output_record.state, kAtDevice); 1415 DCHECK_EQ(output_record.state, kAtDevice);
1415 DCHECK_NE(output_record.picture_id, -1); 1416 DCHECK_NE(output_record.picture_id, -1);
1416 output_buffer_queued_count_--; 1417 output_buffer_queued_count_--;
1417 if (dqbuf.m.planes[0].bytesused == 0) { 1418 if (dqbuf.m.planes[0].bytesused == 0) {
1418 // This is an empty output buffer returned as part of a flush. 1419 // This is an empty output buffer returned as part of a flush.
1419 output_record.state = kFree; 1420 output_record.state = kFree;
1420 free_output_buffers_.push_back(dqbuf.index); 1421 free_output_buffers_.push_back(dqbuf.index);
1421 } else { 1422 } else {
1422 int32_t bitstream_buffer_id = dqbuf.timestamp.tv_sec; 1423 int32_t bitstream_buffer_id = dqbuf.timestamp.tv_sec;
1423 DCHECK_GE(bitstream_buffer_id, 0); 1424 DCHECK_GE(bitstream_buffer_id, 0);
1424 DVLOGF(3) << "Dequeue output buffer: dqbuf index=" << dqbuf.index 1425 DVLOGF(4) << "Dequeue output buffer: dqbuf index=" << dqbuf.index
1425 << " bitstream input_id=" << bitstream_buffer_id; 1426 << " bitstream input_id=" << bitstream_buffer_id;
1426 if (image_processor_device_) { 1427 if (image_processor_device_) {
1427 if (!ProcessFrame(bitstream_buffer_id, dqbuf.index)) { 1428 if (!ProcessFrame(bitstream_buffer_id, dqbuf.index)) {
1428 DLOGF(ERROR) << "Processing frame failed"; 1429 DLOGF(ERROR) << "Processing frame failed";
1429 NOTIFY_ERROR(PLATFORM_FAILURE); 1430 NOTIFY_ERROR(PLATFORM_FAILURE);
1430 return false; 1431 return false;
1431 } 1432 }
1432 } else { 1433 } else {
1433 output_record.state = kAtClient; 1434 output_record.state = kAtClient;
1434 decoder_frames_at_client_++; 1435 decoder_frames_at_client_++;
(...skipping 14 matching lines...) Expand all
1449 struct v4l2_decoder_cmd cmd; 1450 struct v4l2_decoder_cmd cmd;
1450 memset(&cmd, 0, sizeof(cmd)); 1451 memset(&cmd, 0, sizeof(cmd));
1451 cmd.cmd = V4L2_DEC_CMD_START; 1452 cmd.cmd = V4L2_DEC_CMD_START;
1452 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_DECODER_CMD, &cmd); 1453 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_DECODER_CMD, &cmd);
1453 } 1454 }
1454 } 1455 }
1455 return true; 1456 return true;
1456 } 1457 }
1457 1458
1458 bool V4L2VideoDecodeAccelerator::EnqueueInputRecord() { 1459 bool V4L2VideoDecodeAccelerator::EnqueueInputRecord() {
1459 DVLOGF(3); 1460 DVLOGF(4);
1460 DCHECK(!input_ready_queue_.empty()); 1461 DCHECK(!input_ready_queue_.empty());
1461 1462
1462 // Enqueue an input (VIDEO_OUTPUT) buffer. 1463 // Enqueue an input (VIDEO_OUTPUT) buffer.
1463 const int buffer = input_ready_queue_.front(); 1464 const int buffer = input_ready_queue_.front();
1464 InputRecord& input_record = input_buffer_map_[buffer]; 1465 InputRecord& input_record = input_buffer_map_[buffer];
1465 DCHECK(!input_record.at_device); 1466 DCHECK(!input_record.at_device);
1466 struct v4l2_buffer qbuf; 1467 struct v4l2_buffer qbuf;
1467 struct v4l2_plane qbuf_plane; 1468 struct v4l2_plane qbuf_plane;
1468 memset(&qbuf, 0, sizeof(qbuf)); 1469 memset(&qbuf, 0, sizeof(qbuf));
1469 memset(&qbuf_plane, 0, sizeof(qbuf_plane)); 1470 memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1470 qbuf.index = buffer; 1471 qbuf.index = buffer;
1471 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 1472 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1472 qbuf.timestamp.tv_sec = input_record.input_id; 1473 qbuf.timestamp.tv_sec = input_record.input_id;
1473 qbuf.memory = V4L2_MEMORY_MMAP; 1474 qbuf.memory = V4L2_MEMORY_MMAP;
1474 qbuf.m.planes = &qbuf_plane; 1475 qbuf.m.planes = &qbuf_plane;
1475 qbuf.m.planes[0].bytesused = input_record.bytes_used; 1476 qbuf.m.planes[0].bytesused = input_record.bytes_used;
1476 qbuf.length = 1; 1477 qbuf.length = 1;
1477 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); 1478 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1478 input_ready_queue_.pop(); 1479 input_ready_queue_.pop();
1479 input_record.at_device = true; 1480 input_record.at_device = true;
1480 input_buffer_queued_count_++; 1481 input_buffer_queued_count_++;
1481 DVLOGF(3) << "enqueued input_id=" << input_record.input_id 1482 DVLOGF(4) << "enqueued input_id=" << input_record.input_id
1482 << " size=" << input_record.bytes_used; 1483 << " size=" << input_record.bytes_used;
1483 return true; 1484 return true;
1484 } 1485 }
1485 1486
1486 bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() { 1487 bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() {
1487 DCHECK(!free_output_buffers_.empty()); 1488 DCHECK(!free_output_buffers_.empty());
1488 1489
1489 // Enqueue an output (VIDEO_CAPTURE) buffer. 1490 // Enqueue an output (VIDEO_CAPTURE) buffer.
1490 const int buffer = free_output_buffers_.front(); 1491 const int buffer = free_output_buffers_.front();
1491 DVLOGF(3) << "buffer " << buffer; 1492 DVLOGF(4) << "buffer " << buffer;
1492 OutputRecord& output_record = output_buffer_map_[buffer]; 1493 OutputRecord& output_record = output_buffer_map_[buffer];
1493 DCHECK_EQ(output_record.state, kFree); 1494 DCHECK_EQ(output_record.state, kFree);
1494 DCHECK_NE(output_record.picture_id, -1); 1495 DCHECK_NE(output_record.picture_id, -1);
1495 if (output_record.egl_sync != EGL_NO_SYNC_KHR) { 1496 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1496 TRACE_EVENT0("Video Decoder", 1497 TRACE_EVENT0("Video Decoder",
1497 "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR"); 1498 "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR");
1498 // If we have to wait for completion, wait. Note that 1499 // If we have to wait for completion, wait. Note that
1499 // free_output_buffers_ is a FIFO queue, so we always wait on the 1500 // free_output_buffers_ is a FIFO queue, so we always wait on the
1500 // buffer that has been in the queue the longest. 1501 // buffer that has been in the queue the longest.
1501 if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0, 1502 if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0,
(...skipping 12 matching lines...) Expand all
1514 std::unique_ptr<struct v4l2_plane[]> qbuf_planes( 1515 std::unique_ptr<struct v4l2_plane[]> qbuf_planes(
1515 new v4l2_plane[output_planes_count_]); 1516 new v4l2_plane[output_planes_count_]);
1516 memset(&qbuf, 0, sizeof(qbuf)); 1517 memset(&qbuf, 0, sizeof(qbuf));
1517 memset(qbuf_planes.get(), 0, 1518 memset(qbuf_planes.get(), 0,
1518 sizeof(struct v4l2_plane) * output_planes_count_); 1519 sizeof(struct v4l2_plane) * output_planes_count_);
1519 qbuf.index = buffer; 1520 qbuf.index = buffer;
1520 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 1521 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1521 qbuf.memory = V4L2_MEMORY_MMAP; 1522 qbuf.memory = V4L2_MEMORY_MMAP;
1522 qbuf.m.planes = qbuf_planes.get(); 1523 qbuf.m.planes = qbuf_planes.get();
1523 qbuf.length = output_planes_count_; 1524 qbuf.length = output_planes_count_;
1524 DVLOGF(2) << "qbuf.index=" << qbuf.index; 1525 DVLOGF(4) << "qbuf.index=" << qbuf.index;
1525 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); 1526 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1526 free_output_buffers_.pop_front(); 1527 free_output_buffers_.pop_front();
1527 output_record.state = kAtDevice; 1528 output_record.state = kAtDevice;
1528 output_buffer_queued_count_++; 1529 output_buffer_queued_count_++;
1529 return true; 1530 return true;
1530 } 1531 }
1531 1532
1532 void V4L2VideoDecodeAccelerator::ReusePictureBufferTask( 1533 void V4L2VideoDecodeAccelerator::ReusePictureBufferTask(
1533 int32_t picture_buffer_id, 1534 int32_t picture_buffer_id,
1534 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref) { 1535 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref) {
1535 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id; 1536 DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id;
1536 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1537 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1537 TRACE_EVENT0("Video Decoder", "V4L2VDA::ReusePictureBufferTask"); 1538 TRACE_EVENT0("Video Decoder", "V4L2VDA::ReusePictureBufferTask");
1538 1539
1539 // We run ReusePictureBufferTask even if we're in kResetting. 1540 // We run ReusePictureBufferTask even if we're in kResetting.
1540 if (decoder_state_ == kError) { 1541 if (decoder_state_ == kError) {
1541 DVLOGF(2) << "early out: kError state"; 1542 DVLOGF(4) << "early out: kError state";
1542 return; 1543 return;
1543 } 1544 }
1544 1545
1545 if (decoder_state_ == kChangingResolution) { 1546 if (decoder_state_ == kChangingResolution) {
1546 DVLOGF(2) << "early out: kChangingResolution"; 1547 DVLOGF(4) << "early out: kChangingResolution";
1547 return; 1548 return;
1548 } 1549 }
1549 1550
1550 size_t index; 1551 size_t index;
1551 for (index = 0; index < output_buffer_map_.size(); ++index) 1552 for (index = 0; index < output_buffer_map_.size(); ++index)
1552 if (output_buffer_map_[index].picture_id == picture_buffer_id) 1553 if (output_buffer_map_[index].picture_id == picture_buffer_id)
1553 break; 1554 break;
1554 1555
1555 if (index >= output_buffer_map_.size()) { 1556 if (index >= output_buffer_map_.size()) {
1556 // It's possible that we've already posted a DismissPictureBuffer for this 1557 // It's possible that we've already posted a DismissPictureBuffer for this
(...skipping 20 matching lines...) Expand all
1577 if (egl_sync_ref) { 1578 if (egl_sync_ref) {
1578 output_record.egl_sync = egl_sync_ref->egl_sync; 1579 output_record.egl_sync = egl_sync_ref->egl_sync;
1579 // Take ownership of the EGLSync. 1580 // Take ownership of the EGLSync.
1580 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR; 1581 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
1581 } 1582 }
1582 // We got a buffer back, so enqueue it back. 1583 // We got a buffer back, so enqueue it back.
1583 Enqueue(); 1584 Enqueue();
1584 } 1585 }
1585 1586
1586 void V4L2VideoDecodeAccelerator::FlushTask() { 1587 void V4L2VideoDecodeAccelerator::FlushTask() {
1587 DVLOGF(3); 1588 VLOGF(2);
1588 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1589 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1589 TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask"); 1590 TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask");
1590 1591
1591 // Flush outstanding buffers. 1592 // Flush outstanding buffers.
1592 if (decoder_state_ == kInitialized) { 1593 if (decoder_state_ == kInitialized) {
1593 // There's nothing in the pipe, so return done immediately. 1594 // There's nothing in the pipe, so return done immediately.
1594 DVLOGF(3) << "returning flush"; 1595 VLOGF(2) << "returning flush";
1595 child_task_runner_->PostTask(FROM_HERE, 1596 child_task_runner_->PostTask(FROM_HERE,
1596 base::Bind(&Client::NotifyFlushDone, client_)); 1597 base::Bind(&Client::NotifyFlushDone, client_));
1597 return; 1598 return;
1598 } else if (decoder_state_ == kError) { 1599 } else if (decoder_state_ == kError) {
1599 DVLOGF(2) << "early out: kError state"; 1600 VLOGF(2) << "early out: kError state";
1600 return; 1601 return;
1601 } 1602 }
1602 1603
1603 // We don't support stacked flushing. 1604 // We don't support stacked flushing.
1604 DCHECK(!decoder_flushing_); 1605 DCHECK(!decoder_flushing_);
1605 1606
1606 // Queue up an empty buffer -- this triggers the flush. 1607 // Queue up an empty buffer -- this triggers the flush.
1607 decoder_input_queue_.push( 1608 decoder_input_queue_.push(
1608 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef( 1609 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef(
1609 decode_client_, decode_task_runner_, nullptr, kFlushBufferId))); 1610 decode_client_, decode_task_runner_, nullptr, kFlushBufferId)));
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
1658 // For now, do the streamoff-streamon cycle to satisfy Exynos and not freeze 1659 // For now, do the streamoff-streamon cycle to satisfy Exynos and not freeze
1659 // when doing MSE. This should be harmless otherwise. 1660 // when doing MSE. This should be harmless otherwise.
1660 if (!(StopDevicePoll() && StopOutputStream() && StopInputStream())) 1661 if (!(StopDevicePoll() && StopOutputStream() && StopInputStream()))
1661 return; 1662 return;
1662 1663
1663 if (!StartDevicePoll()) 1664 if (!StartDevicePoll())
1664 return; 1665 return;
1665 1666
1666 decoder_delay_bitstream_buffer_id_ = -1; 1667 decoder_delay_bitstream_buffer_id_ = -1;
1667 decoder_flushing_ = false; 1668 decoder_flushing_ = false;
1668 DVLOGF(3) << "returning flush"; 1669 VLOGF(2) << "returning flush";
1669 child_task_runner_->PostTask(FROM_HERE, 1670 child_task_runner_->PostTask(FROM_HERE,
1670 base::Bind(&Client::NotifyFlushDone, client_)); 1671 base::Bind(&Client::NotifyFlushDone, client_));
1671 1672
1672 // While we were flushing, we early-outed DecodeBufferTask()s. 1673 // While we were flushing, we early-outed DecodeBufferTask()s.
1673 ScheduleDecodeBufferTaskIfNeeded(); 1674 ScheduleDecodeBufferTaskIfNeeded();
1674 } 1675 }
1675 1676
1676 bool V4L2VideoDecodeAccelerator::IsDecoderCmdSupported() { 1677 bool V4L2VideoDecodeAccelerator::IsDecoderCmdSupported() {
1677 // CMD_STOP should always succeed. If the decoder is started, the command can 1678 // CMD_STOP should always succeed. If the decoder is started, the command can
1678 // flush it. If the decoder is stopped, the command does nothing. We use this 1679 // flush it. If the decoder is stopped, the command does nothing. We use this
1679 // to know if a driver supports V4L2_DEC_CMD_STOP to flush. 1680 // to know if a driver supports V4L2_DEC_CMD_STOP to flush.
1680 struct v4l2_decoder_cmd cmd; 1681 struct v4l2_decoder_cmd cmd;
1681 memset(&cmd, 0, sizeof(cmd)); 1682 memset(&cmd, 0, sizeof(cmd));
1682 cmd.cmd = V4L2_DEC_CMD_STOP; 1683 cmd.cmd = V4L2_DEC_CMD_STOP;
1683 if (device_->Ioctl(VIDIOC_TRY_DECODER_CMD, &cmd) != 0) { 1684 if (device_->Ioctl(VIDIOC_TRY_DECODER_CMD, &cmd) != 0) {
1684 DVLOGF(3) "V4L2_DEC_CMD_STOP is not supported."; 1685 VLOGF(2) "V4L2_DEC_CMD_STOP is not supported.";
1685 return false; 1686 return false;
1686 } 1687 }
1687 1688
1688 return true; 1689 return true;
1689 } 1690 }
1690 1691
1691 bool V4L2VideoDecodeAccelerator::SendDecoderCmdStop() { 1692 bool V4L2VideoDecodeAccelerator::SendDecoderCmdStop() {
1692 DVLOGF(2); 1693 VLOGF(2);
1693 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1694 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1694 DCHECK(!flush_awaiting_last_output_buffer_); 1695 DCHECK(!flush_awaiting_last_output_buffer_);
1695 1696
1696 struct v4l2_decoder_cmd cmd; 1697 struct v4l2_decoder_cmd cmd;
1697 memset(&cmd, 0, sizeof(cmd)); 1698 memset(&cmd, 0, sizeof(cmd));
1698 cmd.cmd = V4L2_DEC_CMD_STOP; 1699 cmd.cmd = V4L2_DEC_CMD_STOP;
1699 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_DECODER_CMD, &cmd); 1700 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_DECODER_CMD, &cmd);
1700 flush_awaiting_last_output_buffer_ = true; 1701 flush_awaiting_last_output_buffer_ = true;
1701 1702
1702 return true; 1703 return true;
1703 } 1704 }
1704 1705
1705 void V4L2VideoDecodeAccelerator::ResetTask() { 1706 void V4L2VideoDecodeAccelerator::ResetTask() {
1706 DVLOGF(3); 1707 VLOGF(2);
1707 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1708 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1708 TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetTask"); 1709 TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetTask");
1709 1710
1710 if (decoder_state_ == kError) { 1711 if (decoder_state_ == kError) {
1711 DVLOGF(2) << "early out: kError state"; 1712 VLOGF(2) << "early out: kError state";
1712 return; 1713 return;
1713 } 1714 }
1714 decoder_current_bitstream_buffer_.reset(); 1715 decoder_current_bitstream_buffer_.reset();
1715 while (!decoder_input_queue_.empty()) 1716 while (!decoder_input_queue_.empty())
1716 decoder_input_queue_.pop(); 1717 decoder_input_queue_.pop();
1717 1718
1718 decoder_current_input_buffer_ = -1; 1719 decoder_current_input_buffer_ = -1;
1719 1720
1720 // If we are in the middle of switching resolutions or awaiting picture 1721 // If we are in the middle of switching resolutions or awaiting picture
1721 // buffers, postpone reset until it's done. We don't have to worry about 1722 // buffers, postpone reset until it's done. We don't have to worry about
1722 // timing of this wrt to decoding, because output pipe is already 1723 // timing of this wrt to decoding, because output pipe is already
1723 // stopped if we are changing resolution. We will come back here after 1724 // stopped if we are changing resolution. We will come back here after
1724 // we are done. 1725 // we are done.
1725 DCHECK(!reset_pending_); 1726 DCHECK(!reset_pending_);
1726 if (decoder_state_ == kChangingResolution || 1727 if (decoder_state_ == kChangingResolution ||
1727 decoder_state_ == kAwaitingPictureBuffers) { 1728 decoder_state_ == kAwaitingPictureBuffers) {
1728 reset_pending_ = true; 1729 reset_pending_ = true;
1729 return; 1730 return;
1730 } 1731 }
1731 FinishReset(); 1732 FinishReset();
1732 } 1733 }
1733 1734
1734 void V4L2VideoDecodeAccelerator::FinishReset() { 1735 void V4L2VideoDecodeAccelerator::FinishReset() {
1735 DVLOGF(3); 1736 VLOGF(2);
1736 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1737 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1737 1738
1738 reset_pending_ = false; 1739 reset_pending_ = false;
1739 // After the output stream is stopped, the codec should not post any 1740 // After the output stream is stopped, the codec should not post any
1740 // resolution change events. So we dequeue the resolution change event 1741 // resolution change events. So we dequeue the resolution change event
1741 // afterwards. The event could be posted before or while stopping the output 1742 // afterwards. The event could be posted before or while stopping the output
1742 // stream. The codec will expect the buffer of new size after the seek, so 1743 // stream. The codec will expect the buffer of new size after the seek, so
1743 // we need to handle the resolution change event first. 1744 // we need to handle the resolution change event first.
1744 if (!(StopDevicePoll() && StopOutputStream())) 1745 if (!(StopDevicePoll() && StopOutputStream()))
1745 return; 1746 return;
(...skipping 21 matching lines...) Expand all
1767 // Mark that we're resetting, then enqueue a ResetDoneTask(). All intervening 1768 // Mark that we're resetting, then enqueue a ResetDoneTask(). All intervening
1768 // jobs will early-out in the kResetting state. 1769 // jobs will early-out in the kResetting state.
1769 decoder_state_ = kResetting; 1770 decoder_state_ = kResetting;
1770 SendPictureReady(); // Send all pending PictureReady. 1771 SendPictureReady(); // Send all pending PictureReady.
1771 decoder_thread_.task_runner()->PostTask( 1772 decoder_thread_.task_runner()->PostTask(
1772 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ResetDoneTask, 1773 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ResetDoneTask,
1773 base::Unretained(this))); 1774 base::Unretained(this)));
1774 } 1775 }
1775 1776
1776 void V4L2VideoDecodeAccelerator::ResetDoneTask() { 1777 void V4L2VideoDecodeAccelerator::ResetDoneTask() {
1777 DVLOGF(3); 1778 VLOGF(2);
1778 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1779 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1779 TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetDoneTask"); 1780 TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetDoneTask");
1780 1781
1781 if (decoder_state_ == kError) { 1782 if (decoder_state_ == kError) {
1782 DVLOGF(2) << "early out: kError state"; 1783 VLOGF(2) << "early out: kError state";
1783 return; 1784 return;
1784 } 1785 }
1785 1786
1786 // Start poll thread if NotifyFlushDoneIfNeeded has not already. 1787 // Start poll thread if NotifyFlushDoneIfNeeded has not already.
1787 if (!device_poll_thread_.IsRunning()) { 1788 if (!device_poll_thread_.IsRunning()) {
1788 if (!StartDevicePoll()) 1789 if (!StartDevicePoll())
1789 return; 1790 return;
1790 } 1791 }
1791 1792
1792 // Reset format-specific bits. 1793 // Reset format-specific bits.
1793 if (video_profile_ >= H264PROFILE_MIN && video_profile_ <= H264PROFILE_MAX) { 1794 if (video_profile_ >= H264PROFILE_MIN && video_profile_ <= H264PROFILE_MAX) {
1794 decoder_h264_parser_.reset(new H264Parser()); 1795 decoder_h264_parser_.reset(new H264Parser());
1795 } 1796 }
1796 1797
1797 // Jobs drained, we're finished resetting. 1798 // Jobs drained, we're finished resetting.
1798 DCHECK_EQ(decoder_state_, kResetting); 1799 DCHECK_EQ(decoder_state_, kResetting);
1799 decoder_state_ = kInitialized; 1800 decoder_state_ = kInitialized;
1800 1801
1801 decoder_partial_frame_pending_ = false; 1802 decoder_partial_frame_pending_ = false;
1802 decoder_delay_bitstream_buffer_id_ = -1; 1803 decoder_delay_bitstream_buffer_id_ = -1;
1803 child_task_runner_->PostTask(FROM_HERE, 1804 child_task_runner_->PostTask(FROM_HERE,
1804 base::Bind(&Client::NotifyResetDone, client_)); 1805 base::Bind(&Client::NotifyResetDone, client_));
1805 1806
1806 // While we were resetting, we early-outed DecodeBufferTask()s. 1807 // While we were resetting, we early-outed DecodeBufferTask()s.
1807 ScheduleDecodeBufferTaskIfNeeded(); 1808 ScheduleDecodeBufferTaskIfNeeded();
1808 } 1809 }
1809 1810
1810 void V4L2VideoDecodeAccelerator::DestroyTask() { 1811 void V4L2VideoDecodeAccelerator::DestroyTask() {
1811 DVLOGF(3); 1812 VLOGF(2);
1812 TRACE_EVENT0("Video Decoder", "V4L2VDA::DestroyTask"); 1813 TRACE_EVENT0("Video Decoder", "V4L2VDA::DestroyTask");
1813 1814
1814 // DestroyTask() should run regardless of decoder_state_. 1815 // DestroyTask() should run regardless of decoder_state_.
1815 1816
1816 StopDevicePoll(); 1817 StopDevicePoll();
1817 StopOutputStream(); 1818 StopOutputStream();
1818 StopInputStream(); 1819 StopInputStream();
1819 1820
1820 decoder_current_bitstream_buffer_.reset(); 1821 decoder_current_bitstream_buffer_.reset();
1821 decoder_current_input_buffer_ = -1; 1822 decoder_current_input_buffer_ = -1;
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after
1872 // Clear the interrupt now, to be sure. 1873 // Clear the interrupt now, to be sure.
1873 if (!device_->ClearDevicePollInterrupt()) { 1874 if (!device_->ClearDevicePollInterrupt()) {
1874 NOTIFY_ERROR(PLATFORM_FAILURE); 1875 NOTIFY_ERROR(PLATFORM_FAILURE);
1875 return false; 1876 return false;
1876 } 1877 }
1877 DVLOGF(3) << "device poll stopped"; 1878 DVLOGF(3) << "device poll stopped";
1878 return true; 1879 return true;
1879 } 1880 }
1880 1881
1881 bool V4L2VideoDecodeAccelerator::StopOutputStream() { 1882 bool V4L2VideoDecodeAccelerator::StopOutputStream() {
1882 DVLOGF(3); 1883 VLOGF(2);
1883 if (!output_streamon_) 1884 if (!output_streamon_)
1884 return true; 1885 return true;
1885 1886
1886 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 1887 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1887 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); 1888 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1888 output_streamon_ = false; 1889 output_streamon_ = false;
1889 1890
1890 // Output stream is stopped. No need to wait for the buffer anymore. 1891 // Output stream is stopped. No need to wait for the buffer anymore.
1891 flush_awaiting_last_output_buffer_ = false; 1892 flush_awaiting_last_output_buffer_ = false;
1892 1893
1893 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 1894 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1894 // After streamoff, the device drops ownership of all buffers, even if we 1895 // After streamoff, the device drops ownership of all buffers, even if we
1895 // don't dequeue them explicitly. Some of them may still be owned by the 1896 // don't dequeue them explicitly. Some of them may still be owned by the
1896 // client however. Reuse only those that aren't. 1897 // client however. Reuse only those that aren't.
1897 OutputRecord& output_record = output_buffer_map_[i]; 1898 OutputRecord& output_record = output_buffer_map_[i];
1898 if (output_record.state == kAtDevice) { 1899 if (output_record.state == kAtDevice) {
1899 output_record.state = kFree; 1900 output_record.state = kFree;
1900 free_output_buffers_.push_back(i); 1901 free_output_buffers_.push_back(i);
1901 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); 1902 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1902 } 1903 }
1903 } 1904 }
1904 output_buffer_queued_count_ = 0; 1905 output_buffer_queued_count_ = 0;
1905 return true; 1906 return true;
1906 } 1907 }
1907 1908
1908 bool V4L2VideoDecodeAccelerator::StopInputStream() { 1909 bool V4L2VideoDecodeAccelerator::StopInputStream() {
1909 DVLOGF(3); 1910 VLOGF(2);
1910 if (!input_streamon_) 1911 if (!input_streamon_)
1911 return true; 1912 return true;
1912 1913
1913 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 1914 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1914 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); 1915 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1915 input_streamon_ = false; 1916 input_streamon_ = false;
1916 1917
1917 // Reset accounting info for input. 1918 // Reset accounting info for input.
1918 while (!input_ready_queue_.empty()) 1919 while (!input_ready_queue_.empty())
1919 input_ready_queue_.pop(); 1920 input_ready_queue_.pop();
1920 free_input_buffers_.clear(); 1921 free_input_buffers_.clear();
1921 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { 1922 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1922 free_input_buffers_.push_back(i); 1923 free_input_buffers_.push_back(i);
1923 input_buffer_map_[i].at_device = false; 1924 input_buffer_map_[i].at_device = false;
1924 input_buffer_map_[i].bytes_used = 0; 1925 input_buffer_map_[i].bytes_used = 0;
1925 input_buffer_map_[i].input_id = -1; 1926 input_buffer_map_[i].input_id = -1;
1926 } 1927 }
1927 input_buffer_queued_count_ = 0; 1928 input_buffer_queued_count_ = 0;
1928 1929
1929 return true; 1930 return true;
1930 } 1931 }
1931 1932
1932 void V4L2VideoDecodeAccelerator::StartResolutionChange() { 1933 void V4L2VideoDecodeAccelerator::StartResolutionChange() {
1933 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1934 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1934 DCHECK_NE(decoder_state_, kUninitialized); 1935 DCHECK_NE(decoder_state_, kUninitialized);
1935 DCHECK_NE(decoder_state_, kResetting); 1936 DCHECK_NE(decoder_state_, kResetting);
1936 1937
1937 DVLOGF(3) << "Initiate resolution change"; 1938 VLOGF(2) << "Initiate resolution change";
1938 1939
1939 if (!(StopDevicePoll() && StopOutputStream())) 1940 if (!(StopDevicePoll() && StopOutputStream()))
1940 return; 1941 return;
1941 1942
1942 decoder_state_ = kChangingResolution; 1943 decoder_state_ = kChangingResolution;
1943 SendPictureReady(); // Send all pending PictureReady. 1944 SendPictureReady(); // Send all pending PictureReady.
1944 1945
1945 if (!image_processor_bitstream_buffer_ids_.empty()) { 1946 if (!image_processor_bitstream_buffer_ids_.empty()) {
1946 DVLOGF(3) << "Wait image processor to finish before destroying buffers."; 1947 VLOGF(2) << "Wait image processor to finish before destroying buffers.";
1947 return; 1948 return;
1948 } 1949 }
1949 1950
1950 if (image_processor_) 1951 if (image_processor_)
1951 image_processor_.release()->Destroy(); 1952 image_processor_.release()->Destroy();
1952 1953
1953 if (!DestroyOutputBuffers()) { 1954 if (!DestroyOutputBuffers()) {
1954 LOGF(ERROR) << "Failed destroying output buffers."; 1955 LOGF(ERROR) << "Failed destroying output buffers.";
1955 NOTIFY_ERROR(PLATFORM_FAILURE); 1956 NOTIFY_ERROR(PLATFORM_FAILURE);
1956 return; 1957 return;
1957 } 1958 }
1958 1959
1959 FinishResolutionChange(); 1960 FinishResolutionChange();
1960 } 1961 }
1961 1962
1962 void V4L2VideoDecodeAccelerator::FinishResolutionChange() { 1963 void V4L2VideoDecodeAccelerator::FinishResolutionChange() {
1963 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1964 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1964 DCHECK_EQ(decoder_state_, kChangingResolution); 1965 DCHECK_EQ(decoder_state_, kChangingResolution);
1965 DVLOGF(3); 1966 VLOGF(2);
1966 1967
1967 if (decoder_state_ == kError) { 1968 if (decoder_state_ == kError) {
1968 DVLOGF(2) << "early out: kError state"; 1969 VLOGF(2) << "early out: kError state";
1969 return; 1970 return;
1970 } 1971 }
1971 1972
1972 struct v4l2_format format; 1973 struct v4l2_format format;
1973 bool again; 1974 bool again;
1974 gfx::Size visible_size; 1975 gfx::Size visible_size;
1975 bool ret = GetFormatInfo(&format, &visible_size, &again); 1976 bool ret = GetFormatInfo(&format, &visible_size, &again);
1976 if (!ret || again) { 1977 if (!ret || again) {
1977 LOGF(ERROR) << "Couldn't get format information after resolution change"; 1978 LOGF(ERROR) << "Couldn't get format information after resolution change";
1978 NOTIFY_ERROR(PLATFORM_FAILURE); 1979 NOTIFY_ERROR(PLATFORM_FAILURE);
1979 return; 1980 return;
1980 } 1981 }
1981 1982
1982 if (!CreateBuffersForFormat(format, visible_size)) { 1983 if (!CreateBuffersForFormat(format, visible_size)) {
1983 LOGF(ERROR) << "Couldn't reallocate buffers after resolution change"; 1984 LOGF(ERROR) << "Couldn't reallocate buffers after resolution change";
1984 NOTIFY_ERROR(PLATFORM_FAILURE); 1985 NOTIFY_ERROR(PLATFORM_FAILURE);
1985 return; 1986 return;
1986 } 1987 }
1987 1988
1988 if (!StartDevicePoll()) 1989 if (!StartDevicePoll())
1989 return; 1990 return;
1990 } 1991 }
1991 1992
1992 void V4L2VideoDecodeAccelerator::DevicePollTask(bool poll_device) { 1993 void V4L2VideoDecodeAccelerator::DevicePollTask(bool poll_device) {
1993 DVLOGF(3); 1994 DVLOGF(4);
1994 DCHECK(device_poll_thread_.task_runner()->BelongsToCurrentThread()); 1995 DCHECK(device_poll_thread_.task_runner()->BelongsToCurrentThread());
1995 TRACE_EVENT0("Video Decoder", "V4L2VDA::DevicePollTask"); 1996 TRACE_EVENT0("Video Decoder", "V4L2VDA::DevicePollTask");
1996 1997
1997 bool event_pending = false; 1998 bool event_pending = false;
1998 1999
1999 if (!device_->Poll(poll_device, &event_pending)) { 2000 if (!device_->Poll(poll_device, &event_pending)) {
2000 NOTIFY_ERROR(PLATFORM_FAILURE); 2001 NOTIFY_ERROR(PLATFORM_FAILURE);
2001 return; 2002 return;
2002 } 2003 }
2003 2004
2004 // All processing should happen on ServiceDeviceTask(), since we shouldn't 2005 // All processing should happen on ServiceDeviceTask(), since we shouldn't
2005 // touch decoder state from this thread. 2006 // touch decoder state from this thread.
2006 decoder_thread_.task_runner()->PostTask( 2007 decoder_thread_.task_runner()->PostTask(
2007 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ServiceDeviceTask, 2008 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ServiceDeviceTask,
2008 base::Unretained(this), event_pending)); 2009 base::Unretained(this), event_pending));
2009 } 2010 }
2010 2011
2011 void V4L2VideoDecodeAccelerator::NotifyError(Error error) { 2012 void V4L2VideoDecodeAccelerator::NotifyError(Error error) {
2012 DVLOGF(2); 2013 VLOGF(2);
2013 2014
2014 if (!child_task_runner_->BelongsToCurrentThread()) { 2015 if (!child_task_runner_->BelongsToCurrentThread()) {
2015 child_task_runner_->PostTask( 2016 child_task_runner_->PostTask(
2016 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::NotifyError, 2017 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::NotifyError,
2017 weak_this_, error)); 2018 weak_this_, error));
2018 return; 2019 return;
2019 } 2020 }
2020 2021
2021 if (client_) { 2022 if (client_) {
2022 client_->NotifyError(error); 2023 client_->NotifyError(error);
(...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after
2089 if (!V4L2ImageProcessor::TryOutputFormat( 2090 if (!V4L2ImageProcessor::TryOutputFormat(
2090 output_format_fourcc_, egl_image_format_fourcc_, &egl_image_size_, 2091 output_format_fourcc_, egl_image_format_fourcc_, &egl_image_size_,
2091 &egl_image_planes_count_)) { 2092 &egl_image_planes_count_)) {
2092 LOGF(ERROR) << "Fail to get output size and plane count of processor"; 2093 LOGF(ERROR) << "Fail to get output size and plane count of processor";
2093 return false; 2094 return false;
2094 } 2095 }
2095 } else { 2096 } else {
2096 egl_image_size_ = coded_size_; 2097 egl_image_size_ = coded_size_;
2097 egl_image_planes_count_ = output_planes_count_; 2098 egl_image_planes_count_ = output_planes_count_;
2098 } 2099 }
2099 DVLOGF(3) << "new resolution: " << coded_size_.ToString() 2100 VLOGF(2) << "new resolution: " << coded_size_.ToString()
2100 << ", visible size: " << visible_size_.ToString() 2101 << ", visible size: " << visible_size_.ToString()
2101 << ", decoder output planes count: " << output_planes_count_ 2102 << ", decoder output planes count: " << output_planes_count_
2102 << ", EGLImage size: " << egl_image_size_.ToString() 2103 << ", EGLImage size: " << egl_image_size_.ToString()
2103 << ", EGLImage plane count: " << egl_image_planes_count_; 2104 << ", EGLImage plane count: " << egl_image_planes_count_;
2104 2105
2105 return CreateOutputBuffers(); 2106 return CreateOutputBuffers();
2106 } 2107 }
2107 2108
2108 gfx::Size V4L2VideoDecodeAccelerator::GetVisibleSize( 2109 gfx::Size V4L2VideoDecodeAccelerator::GetVisibleSize(
2109 const gfx::Size& coded_size) { 2110 const gfx::Size& coded_size) {
2110 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 2111 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
2111 2112
2112 struct v4l2_crop crop_arg; 2113 struct v4l2_crop crop_arg;
2113 memset(&crop_arg, 0, sizeof(crop_arg)); 2114 memset(&crop_arg, 0, sizeof(crop_arg));
2114 crop_arg.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 2115 crop_arg.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2115 2116
2116 if (device_->Ioctl(VIDIOC_G_CROP, &crop_arg) != 0) { 2117 if (device_->Ioctl(VIDIOC_G_CROP, &crop_arg) != 0) {
2117 PLOGF(ERROR) << "ioctl() VIDIOC_G_CROP failed"; 2118 PLOGF(ERROR) << "ioctl() VIDIOC_G_CROP failed";
2118 return coded_size; 2119 return coded_size;
2119 } 2120 }
2120 2121
2121 gfx::Rect rect(crop_arg.c.left, crop_arg.c.top, crop_arg.c.width, 2122 gfx::Rect rect(crop_arg.c.left, crop_arg.c.top, crop_arg.c.width,
2122 crop_arg.c.height); 2123 crop_arg.c.height);
2123 DVLOGF(3) << "visible rectangle is " << rect.ToString(); 2124 VLOGF(2) << "visible rectangle is " << rect.ToString();
2124 if (!gfx::Rect(coded_size).Contains(rect)) { 2125 if (!gfx::Rect(coded_size).Contains(rect)) {
2125 DLOGF(ERROR) << "visible rectangle " << rect.ToString() 2126 DLOGF(ERROR) << "visible rectangle " << rect.ToString()
2126 << " is not inside coded size " << coded_size.ToString(); 2127 << " is not inside coded size " << coded_size.ToString();
2127 return coded_size; 2128 return coded_size;
2128 } 2129 }
2129 if (rect.IsEmpty()) { 2130 if (rect.IsEmpty()) {
2130 DLOGF(ERROR) << "visible size is empty"; 2131 DLOGF(ERROR) << "visible size is empty";
2131 return coded_size; 2132 return coded_size;
2132 } 2133 }
2133 2134
2134 // Chrome assume picture frame is coded at (0, 0). 2135 // Chrome assume picture frame is coded at (0, 0).
2135 if (!rect.origin().IsOrigin()) { 2136 if (!rect.origin().IsOrigin()) {
2136 DLOGF(ERROR) << "Unexpected visible rectangle " << rect.ToString() 2137 DLOGF(ERROR) << "Unexpected visible rectangle " << rect.ToString()
2137 << ", top-left is not origin"; 2138 << ", top-left is not origin";
2138 return coded_size; 2139 return coded_size;
2139 } 2140 }
2140 2141
2141 return rect.size(); 2142 return rect.size();
2142 } 2143 }
2143 2144
2144 bool V4L2VideoDecodeAccelerator::CreateInputBuffers() { 2145 bool V4L2VideoDecodeAccelerator::CreateInputBuffers() {
2145 DVLOGF(3); 2146 VLOGF(2);
2146 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 2147 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
2147 // We always run this as we prepare to initialize. 2148 // We always run this as we prepare to initialize.
2148 DCHECK_EQ(decoder_state_, kInitialized); 2149 DCHECK_EQ(decoder_state_, kInitialized);
2149 DCHECK(!input_streamon_); 2150 DCHECK(!input_streamon_);
2150 DCHECK(input_buffer_map_.empty()); 2151 DCHECK(input_buffer_map_.empty());
2151 2152
2152 struct v4l2_requestbuffers reqbufs; 2153 struct v4l2_requestbuffers reqbufs;
2153 memset(&reqbufs, 0, sizeof(reqbufs)); 2154 memset(&reqbufs, 0, sizeof(reqbufs));
2154 reqbufs.count = kInputBufferCount; 2155 reqbufs.count = kInputBufferCount;
2155 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 2156 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
2208 bool is_format_supported = false; 2209 bool is_format_supported = false;
2209 while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) { 2210 while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
2210 if (fmtdesc.pixelformat == input_format_fourcc_) { 2211 if (fmtdesc.pixelformat == input_format_fourcc_) {
2211 is_format_supported = true; 2212 is_format_supported = true;
2212 break; 2213 break;
2213 } 2214 }
2214 ++fmtdesc.index; 2215 ++fmtdesc.index;
2215 } 2216 }
2216 2217
2217 if (!is_format_supported) { 2218 if (!is_format_supported) {
2218 DVLOGF(1) << "Input fourcc " << input_format_fourcc_ 2219 VLOGF(1) << "Input fourcc " << input_format_fourcc_
2219 << " not supported by device."; 2220 << " not supported by device.";
2220 return false; 2221 return false;
2221 } 2222 }
2222 2223
2223 struct v4l2_format format; 2224 struct v4l2_format format;
2224 memset(&format, 0, sizeof(format)); 2225 memset(&format, 0, sizeof(format));
2225 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 2226 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2226 format.fmt.pix_mp.pixelformat = input_format_fourcc_; 2227 format.fmt.pix_mp.pixelformat = input_format_fourcc_;
2227 format.fmt.pix_mp.plane_fmt[0].sizeimage = input_size; 2228 format.fmt.pix_mp.plane_fmt[0].sizeimage = input_size;
2228 format.fmt.pix_mp.num_planes = 1; 2229 format.fmt.pix_mp.num_planes = 1;
2229 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); 2230 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
2230 2231
2231 // We have to set up the format for output, because the driver may not allow 2232 // We have to set up the format for output, because the driver may not allow
2232 // changing it once we start streaming; whether it can support our chosen 2233 // changing it once we start streaming; whether it can support our chosen
2233 // output format or not may depend on the input format. 2234 // output format or not may depend on the input format.
2234 memset(&fmtdesc, 0, sizeof(fmtdesc)); 2235 memset(&fmtdesc, 0, sizeof(fmtdesc));
2235 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 2236 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2236 while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) { 2237 while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
2237 if (device_->CanCreateEGLImageFrom(fmtdesc.pixelformat)) { 2238 if (device_->CanCreateEGLImageFrom(fmtdesc.pixelformat)) {
2238 output_format_fourcc_ = fmtdesc.pixelformat; 2239 output_format_fourcc_ = fmtdesc.pixelformat;
2239 break; 2240 break;
2240 } 2241 }
2241 ++fmtdesc.index; 2242 ++fmtdesc.index;
2242 } 2243 }
2243 2244
2244 DCHECK(!image_processor_device_); 2245 DCHECK(!image_processor_device_);
2245 if (output_format_fourcc_ == 0) { 2246 if (output_format_fourcc_ == 0) {
2246 DVLOGF(1) << "Could not find a usable output format. Try image processor"; 2247 VLOGF(1) << "Could not find a usable output format. Try image processor";
2247 if (!V4L2ImageProcessor::IsSupported()) { 2248 if (!V4L2ImageProcessor::IsSupported()) {
2248 DVLOGF(1) << "Image processor not available"; 2249 VLOGF(1) << "Image processor not available";
2249 return false; 2250 return false;
2250 } 2251 }
2251 output_format_fourcc_ = FindImageProcessorInputFormat(); 2252 output_format_fourcc_ = FindImageProcessorInputFormat();
2252 if (output_format_fourcc_ == 0) { 2253 if (output_format_fourcc_ == 0) {
2253 LOGF(ERROR) << "Can't find a usable input format from image processor"; 2254 LOGF(ERROR) << "Can't find a usable input format from image processor";
2254 return false; 2255 return false;
2255 } 2256 }
2256 egl_image_format_fourcc_ = FindImageProcessorOutputFormat(); 2257 egl_image_format_fourcc_ = FindImageProcessorOutputFormat();
2257 if (egl_image_format_fourcc_ == 0) { 2258 if (egl_image_format_fourcc_ == 0) {
2258 LOGF(ERROR) << "Can't find a usable output format from image processor"; 2259 LOGF(ERROR) << "Can't find a usable output format from image processor";
2259 return false; 2260 return false;
2260 } 2261 }
2261 image_processor_device_ = V4L2Device::Create(); 2262 image_processor_device_ = V4L2Device::Create();
2262 if (!image_processor_device_) { 2263 if (!image_processor_device_) {
2263 DVLOGF(1) << "Could not create a V4L2Device for image processor"; 2264 VLOGF(1) << "Could not create a V4L2Device for image processor";
2264 return false; 2265 return false;
2265 } 2266 }
2266 egl_image_device_ = image_processor_device_; 2267 egl_image_device_ = image_processor_device_;
2267 } else { 2268 } else {
2268 if (output_mode_ == Config::OutputMode::IMPORT) { 2269 if (output_mode_ == Config::OutputMode::IMPORT) {
2269 LOGF(ERROR) << "Import mode without image processor is not implemented " 2270 LOGF(ERROR) << "Import mode without image processor is not implemented "
2270 << "yet."; 2271 << "yet.";
2271 return false; 2272 return false;
2272 } 2273 }
2273 egl_image_format_fourcc_ = output_format_fourcc_; 2274 egl_image_format_fourcc_ = output_format_fourcc_;
2274 egl_image_device_ = device_; 2275 egl_image_device_ = device_;
2275 } 2276 }
2276 DVLOGF(2) << "Output format=" << output_format_fourcc_; 2277 VLOGF(2) << "Output format=" << output_format_fourcc_;
2277 2278
2278 // Just set the fourcc for output; resolution, etc., will come from the 2279 // Just set the fourcc for output; resolution, etc., will come from the
2279 // driver once it extracts it from the stream. 2280 // driver once it extracts it from the stream.
2280 memset(&format, 0, sizeof(format)); 2281 memset(&format, 0, sizeof(format));
2281 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 2282 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2282 format.fmt.pix_mp.pixelformat = output_format_fourcc_; 2283 format.fmt.pix_mp.pixelformat = output_format_fourcc_;
2283 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); 2284 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
2284 2285
2285 return true; 2286 return true;
2286 } 2287 }
2287 2288
2288 uint32_t V4L2VideoDecodeAccelerator::FindImageProcessorInputFormat() { 2289 uint32_t V4L2VideoDecodeAccelerator::FindImageProcessorInputFormat() {
2289 std::vector<uint32_t> processor_input_formats = 2290 std::vector<uint32_t> processor_input_formats =
2290 V4L2ImageProcessor::GetSupportedInputFormats(); 2291 V4L2ImageProcessor::GetSupportedInputFormats();
2291 2292
2292 struct v4l2_fmtdesc fmtdesc; 2293 struct v4l2_fmtdesc fmtdesc;
2293 memset(&fmtdesc, 0, sizeof(fmtdesc)); 2294 memset(&fmtdesc, 0, sizeof(fmtdesc));
2294 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 2295 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2295 while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) { 2296 while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
2296 if (std::find(processor_input_formats.begin(), 2297 if (std::find(processor_input_formats.begin(),
2297 processor_input_formats.end(), 2298 processor_input_formats.end(),
2298 fmtdesc.pixelformat) != processor_input_formats.end()) { 2299 fmtdesc.pixelformat) != processor_input_formats.end()) {
2299 DVLOGF(1) << "Image processor input format=" << fmtdesc.description; 2300 VLOGF(2) << "Image processor input format=" << fmtdesc.description;
2300 return fmtdesc.pixelformat; 2301 return fmtdesc.pixelformat;
2301 } 2302 }
2302 ++fmtdesc.index; 2303 ++fmtdesc.index;
2303 } 2304 }
2304 return 0; 2305 return 0;
2305 } 2306 }
2306 2307
2307 uint32_t V4L2VideoDecodeAccelerator::FindImageProcessorOutputFormat() { 2308 uint32_t V4L2VideoDecodeAccelerator::FindImageProcessorOutputFormat() {
2308 // Prefer YVU420 and NV12 because ArcGpuVideoDecodeAccelerator only supports 2309 // Prefer YVU420 and NV12 because ArcGpuVideoDecodeAccelerator only supports
2309 // single physical plane. Prefer YVU420 over NV12 because chrome rendering 2310 // single physical plane. Prefer YVU420 over NV12 because chrome rendering
(...skipping 10 matching lines...) Expand all
2320 2321
2321 std::vector<uint32_t> processor_output_formats = 2322 std::vector<uint32_t> processor_output_formats =
2322 V4L2ImageProcessor::GetSupportedOutputFormats(); 2323 V4L2ImageProcessor::GetSupportedOutputFormats();
2323 2324
2324 // Move the preferred formats to the front. 2325 // Move the preferred formats to the front.
2325 std::sort(processor_output_formats.begin(), processor_output_formats.end(), 2326 std::sort(processor_output_formats.begin(), processor_output_formats.end(),
2326 preferred_formats_first); 2327 preferred_formats_first);
2327 2328
2328 for (uint32_t processor_output_format : processor_output_formats) { 2329 for (uint32_t processor_output_format : processor_output_formats) {
2329 if (device_->CanCreateEGLImageFrom(processor_output_format)) { 2330 if (device_->CanCreateEGLImageFrom(processor_output_format)) {
2330 DVLOGF(1) << "Image processor output format=" << processor_output_format; 2331 VLOGF(2) << "Image processor output format=" << processor_output_format;
2331 return processor_output_format; 2332 return processor_output_format;
2332 } 2333 }
2333 } 2334 }
2334 2335
2335 return 0; 2336 return 0;
2336 } 2337 }
2337 2338
2338 bool V4L2VideoDecodeAccelerator::ResetImageProcessor() { 2339 bool V4L2VideoDecodeAccelerator::ResetImageProcessor() {
2339 DVLOGF(3); 2340 VLOGF(2);
2340 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 2341 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
2341 2342
2342 if (!image_processor_->Reset()) 2343 if (!image_processor_->Reset())
2343 return false; 2344 return false;
2344 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 2345 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
2345 OutputRecord& output_record = output_buffer_map_[i]; 2346 OutputRecord& output_record = output_buffer_map_[i];
2346 if (output_record.state == kAtProcessor) { 2347 if (output_record.state == kAtProcessor) {
2347 output_record.state = kFree; 2348 output_record.state = kFree;
2348 free_output_buffers_.push_back(i); 2349 free_output_buffers_.push_back(i);
2349 } 2350 }
2350 } 2351 }
2351 while (!image_processor_bitstream_buffer_ids_.empty()) 2352 while (!image_processor_bitstream_buffer_ids_.empty())
2352 image_processor_bitstream_buffer_ids_.pop(); 2353 image_processor_bitstream_buffer_ids_.pop();
2353 2354
2354 return true; 2355 return true;
2355 } 2356 }
2356 2357
2357 bool V4L2VideoDecodeAccelerator::CreateImageProcessor() { 2358 bool V4L2VideoDecodeAccelerator::CreateImageProcessor() {
2358 DVLOGF(3); 2359 VLOGF(2);
2359 DCHECK(!image_processor_); 2360 DCHECK(!image_processor_);
2360 image_processor_.reset(new V4L2ImageProcessor(image_processor_device_)); 2361 image_processor_.reset(new V4L2ImageProcessor(image_processor_device_));
2361 v4l2_memory output_memory_type = 2362 v4l2_memory output_memory_type =
2362 (output_mode_ == Config::OutputMode::ALLOCATE ? V4L2_MEMORY_MMAP 2363 (output_mode_ == Config::OutputMode::ALLOCATE ? V4L2_MEMORY_MMAP
2363 : V4L2_MEMORY_DMABUF); 2364 : V4L2_MEMORY_DMABUF);
2364 // Unretained is safe because |this| owns image processor and there will be 2365 // Unretained is safe because |this| owns image processor and there will be
2365 // no callbacks after processor destroys. 2366 // no callbacks after processor destroys.
2366 if (!image_processor_->Initialize( 2367 if (!image_processor_->Initialize(
2367 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_), 2368 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_),
2368 V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_), 2369 V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_),
2369 V4L2_MEMORY_DMABUF, output_memory_type, visible_size_, coded_size_, 2370 V4L2_MEMORY_DMABUF, output_memory_type, visible_size_, coded_size_,
2370 visible_size_, egl_image_size_, output_buffer_map_.size(), 2371 visible_size_, egl_image_size_, output_buffer_map_.size(),
2371 base::Bind(&V4L2VideoDecodeAccelerator::ImageProcessorError, 2372 base::Bind(&V4L2VideoDecodeAccelerator::ImageProcessorError,
2372 base::Unretained(this)))) { 2373 base::Unretained(this)))) {
2373 LOGF(ERROR) << "Initialize image processor failed"; 2374 LOGF(ERROR) << "Initialize image processor failed";
2374 NOTIFY_ERROR(PLATFORM_FAILURE); 2375 NOTIFY_ERROR(PLATFORM_FAILURE);
2375 return false; 2376 return false;
2376 } 2377 }
2377 DVLOGF(3) << "image_processor_->output_allocated_size()=" 2378 VLOGF(2) << "image_processor_->output_allocated_size()="
2378 << image_processor_->output_allocated_size().ToString(); 2379 << image_processor_->output_allocated_size().ToString();
2379 DCHECK(image_processor_->output_allocated_size() == egl_image_size_); 2380 DCHECK(image_processor_->output_allocated_size() == egl_image_size_);
2380 if (image_processor_->input_allocated_size() != coded_size_) { 2381 if (image_processor_->input_allocated_size() != coded_size_) {
2381 LOGF(ERROR) << "Image processor should be able to take the output coded " 2382 LOGF(ERROR) << "Image processor should be able to take the output coded "
2382 << "size of decoder " << coded_size_.ToString() 2383 << "size of decoder " << coded_size_.ToString()
2383 << " without adjusting to " 2384 << " without adjusting to "
2384 << image_processor_->input_allocated_size().ToString(); 2385 << image_processor_->input_allocated_size().ToString();
2385 NOTIFY_ERROR(PLATFORM_FAILURE); 2386 NOTIFY_ERROR(PLATFORM_FAILURE);
2386 return false; 2387 return false;
2387 } 2388 }
2388 return true; 2389 return true;
2389 } 2390 }
2390 2391
2391 bool V4L2VideoDecodeAccelerator::ProcessFrame(int32_t bitstream_buffer_id, 2392 bool V4L2VideoDecodeAccelerator::ProcessFrame(int32_t bitstream_buffer_id,
2392 int output_buffer_index) { 2393 int output_buffer_index) {
2393 DVLOGF(3); 2394 DVLOGF(4);
2394 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 2395 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
2395 2396
2396 OutputRecord& output_record = output_buffer_map_[output_buffer_index]; 2397 OutputRecord& output_record = output_buffer_map_[output_buffer_index];
2397 DCHECK_EQ(output_record.state, kAtDevice); 2398 DCHECK_EQ(output_record.state, kAtDevice);
2398 output_record.state = kAtProcessor; 2399 output_record.state = kAtProcessor;
2399 image_processor_bitstream_buffer_ids_.push(bitstream_buffer_id); 2400 image_processor_bitstream_buffer_ids_.push(bitstream_buffer_id);
2400 std::vector<int> processor_input_fds; 2401 std::vector<int> processor_input_fds;
2401 for (auto& fd : output_record.processor_input_fds) { 2402 for (auto& fd : output_record.processor_input_fds) {
2402 processor_input_fds.push_back(fd.get()); 2403 processor_input_fds.push_back(fd.get());
2403 } 2404 }
(...skipping 16 matching lines...) Expand all
2420 // Unretained is safe because |this| owns image processor and there will 2421 // Unretained is safe because |this| owns image processor and there will
2421 // be no callbacks after processor destroys. 2422 // be no callbacks after processor destroys.
2422 image_processor_->Process( 2423 image_processor_->Process(
2423 input_frame, output_buffer_index, std::move(processor_output_fds), 2424 input_frame, output_buffer_index, std::move(processor_output_fds),
2424 base::Bind(&V4L2VideoDecodeAccelerator::FrameProcessed, 2425 base::Bind(&V4L2VideoDecodeAccelerator::FrameProcessed,
2425 base::Unretained(this), bitstream_buffer_id)); 2426 base::Unretained(this), bitstream_buffer_id));
2426 return true; 2427 return true;
2427 } 2428 }
2428 2429
2429 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() { 2430 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() {
2430 DVLOGF(3); 2431 VLOGF(2);
2431 DCHECK(decoder_state_ == kInitialized || 2432 DCHECK(decoder_state_ == kInitialized ||
2432 decoder_state_ == kChangingResolution); 2433 decoder_state_ == kChangingResolution);
2433 DCHECK(!output_streamon_); 2434 DCHECK(!output_streamon_);
2434 DCHECK(output_buffer_map_.empty()); 2435 DCHECK(output_buffer_map_.empty());
2435 2436
2436 // Number of output buffers we need. 2437 // Number of output buffers we need.
2437 struct v4l2_control ctrl; 2438 struct v4l2_control ctrl;
2438 memset(&ctrl, 0, sizeof(ctrl)); 2439 memset(&ctrl, 0, sizeof(ctrl));
2439 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE; 2440 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
2440 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl); 2441 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl);
(...skipping 27 matching lines...) Expand all
2468 // non-slice NALUs and could even get another resolution change before we were 2469 // non-slice NALUs and could even get another resolution change before we were
2469 // done with this one. After we get the buffers, we'll go back into kIdle and 2470 // done with this one. After we get the buffers, we'll go back into kIdle and
2470 // kick off further event processing, and eventually go back into kDecoding 2471 // kick off further event processing, and eventually go back into kDecoding
2471 // once no more events are pending (if any). 2472 // once no more events are pending (if any).
2472 decoder_state_ = kAwaitingPictureBuffers; 2473 decoder_state_ = kAwaitingPictureBuffers;
2473 2474
2474 return true; 2475 return true;
2475 } 2476 }
2476 2477
2477 void V4L2VideoDecodeAccelerator::DestroyInputBuffers() { 2478 void V4L2VideoDecodeAccelerator::DestroyInputBuffers() {
2478 DVLOGF(3); 2479 VLOGF(2);
2479 DCHECK(!decoder_thread_.IsRunning() || 2480 DCHECK(!decoder_thread_.IsRunning() ||
2480 decoder_thread_.task_runner()->BelongsToCurrentThread()); 2481 decoder_thread_.task_runner()->BelongsToCurrentThread());
2481 DCHECK(!input_streamon_); 2482 DCHECK(!input_streamon_);
2482 2483
2483 if (input_buffer_map_.empty()) 2484 if (input_buffer_map_.empty())
2484 return; 2485 return;
2485 2486
2486 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { 2487 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
2487 if (input_buffer_map_[i].address != NULL) { 2488 if (input_buffer_map_[i].address != NULL) {
2488 device_->Munmap(input_buffer_map_[i].address, 2489 device_->Munmap(input_buffer_map_[i].address,
2489 input_buffer_map_[i].length); 2490 input_buffer_map_[i].length);
2490 } 2491 }
2491 } 2492 }
2492 2493
2493 struct v4l2_requestbuffers reqbufs; 2494 struct v4l2_requestbuffers reqbufs;
2494 memset(&reqbufs, 0, sizeof(reqbufs)); 2495 memset(&reqbufs, 0, sizeof(reqbufs));
2495 reqbufs.count = 0; 2496 reqbufs.count = 0;
2496 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 2497 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2497 reqbufs.memory = V4L2_MEMORY_MMAP; 2498 reqbufs.memory = V4L2_MEMORY_MMAP;
2498 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); 2499 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
2499 2500
2500 input_buffer_map_.clear(); 2501 input_buffer_map_.clear();
2501 free_input_buffers_.clear(); 2502 free_input_buffers_.clear();
2502 } 2503 }
2503 2504
// Tears down the V4L2 CAPTURE-queue (output) buffers: destroys EGL images
// and sync objects, dismisses the corresponding PictureBuffers on the child
// thread, releases the buffers with REQBUFS(0), and resets bookkeeping.
// Returns false if any individual teardown step failed, but always attempts
// every step.
bool V4L2VideoDecodeAccelerator::DestroyOutputBuffers() {
  VLOGF(2);
  DCHECK(!decoder_thread_.IsRunning() ||
         decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK(!output_streamon_);
  bool success = true;

  if (output_buffer_map_.empty())
    return true;

  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    OutputRecord& output_record = output_buffer_map_[i];

    // EGL images must be destroyed on the child (GL) thread; post the work
    // and ignore the result.
    if (output_record.egl_image != EGL_NO_IMAGE_KHR) {
      child_task_runner_->PostTask(
          FROM_HERE,
          base::Bind(base::IgnoreResult(&V4L2Device::DestroyEGLImage), device_,
                     egl_display_, output_record.egl_image));
    }

    if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
      if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
        VLOGF(1) << "eglDestroySyncKHR failed.";
        success = false;
      }
    }

    VLOGF(2) << "dismissing PictureBuffer id=" << output_record.picture_id;
    child_task_runner_->PostTask(
        FROM_HERE, base::Bind(&Client::DismissPictureBuffer, client_,
                              output_record.picture_id));
  }

  // Release all capture buffers back to the driver.
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) {
    PLOGF(ERROR) << "ioctl() failed: VIDIOC_REQBUFS";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    success = false;
  }

  output_buffer_map_.clear();
  while (!free_output_buffers_.empty())
    free_output_buffers_.pop_front();
  output_buffer_queued_count_ = 0;
  // The client may still hold some buffers. The texture holds a reference to
  // the buffer. It is OK to free the buffer and destroy EGLImage here.
  decoder_frames_at_client_ = 0;

  return success;
}
2558 2559
// Drains |pending_picture_ready_| in order, delivering PictureReady to the
// client. Already-cleared pictures go straight to the decode task runner;
// uncleared pictures (or all pictures while resetting/flushing/changing
// resolution) are routed through the child thread so they can be cleared,
// and ordering is preserved by stalling behind outstanding clears.
void V4L2VideoDecodeAccelerator::SendPictureReady() {
  DVLOGF(4);
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  // During resolution change, reset, or flush, every queued picture must be
  // sent immediately so completion callbacks arrive in the right order.
  bool send_now = (decoder_state_ == kChangingResolution ||
                   decoder_state_ == kResetting || decoder_flushing_);
  while (pending_picture_ready_.size() > 0) {
    bool cleared = pending_picture_ready_.front().cleared;
    const Picture& picture = pending_picture_ready_.front().picture;
    if (cleared && picture_clearing_count_ == 0) {
      // This picture is cleared. It can be posted to a thread different than
      // the main GPU thread to reduce latency. This should be the case after
      // all pictures are cleared at the beginning.
      decode_task_runner_->PostTask(
          FROM_HERE,
          base::Bind(&Client::PictureReady, decode_client_, picture));
      pending_picture_ready_.pop();
    } else if (!cleared || send_now) {
      DVLOGF(4) << "cleared=" << pending_picture_ready_.front().cleared
                << ", decoder_state_=" << decoder_state_
                << ", decoder_flushing_=" << decoder_flushing_
                << ", picture_clearing_count_=" << picture_clearing_count_;
      // If the picture is not cleared, post it to the child thread because it
      // has to be cleared in the child thread. A picture only needs to be
      // cleared once. If the decoder is changing resolution, resetting or
      // flushing, send all pictures to ensure PictureReady arrive before
      // ProvidePictureBuffers, NotifyResetDone, or NotifyFlushDone.
      child_task_runner_->PostTaskAndReply(
          FROM_HERE, base::Bind(&Client::PictureReady, client_, picture),
          // Unretained is safe. If Client::PictureReady gets to run, |this| is
          // alive. Destroy() will wait the decode thread to finish.
          base::Bind(&V4L2VideoDecodeAccelerator::PictureCleared,
                     base::Unretained(this)));
      picture_clearing_count_++;
      pending_picture_ready_.pop();
    } else {
      // This picture is cleared. But some pictures are about to be cleared on
      // the child thread. To preserve the order, do not send this until those
      // pictures are cleared.
      break;
    }
  }
}
2601 2602
2602 void V4L2VideoDecodeAccelerator::PictureCleared() { 2603 void V4L2VideoDecodeAccelerator::PictureCleared() {
2603 DVLOGF(3) << "clearing count=" << picture_clearing_count_; 2604 DVLOGF(4) << "clearing count=" << picture_clearing_count_;
2604 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 2605 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
2605 DCHECK_GT(picture_clearing_count_, 0); 2606 DCHECK_GT(picture_clearing_count_, 0);
2606 picture_clearing_count_--; 2607 picture_clearing_count_--;
2607 SendPictureReady(); 2608 SendPictureReady();
2608 } 2609 }
2609 2610
// Callback from |image_processor_| when one frame has been converted.
// Hands the corresponding output buffer to the client as a Picture and,
// once the processor's queue drains, unblocks any pending flush or
// resolution change.
void V4L2VideoDecodeAccelerator::FrameProcessed(int32_t bitstream_buffer_id,
                                                int output_buffer_index) {
  DVLOGF(4) << "output_buffer_index=" << output_buffer_index
            << ", bitstream_buffer_id=" << bitstream_buffer_id;
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK(!image_processor_bitstream_buffer_ids_.empty());
  // Frames must complete in the order they were queued at the processor.
  DCHECK(image_processor_bitstream_buffer_ids_.front() == bitstream_buffer_id);
  DCHECK_GE(output_buffer_index, 0);
  DCHECK_LT(output_buffer_index, static_cast<int>(output_buffer_map_.size()));

  OutputRecord& output_record = output_buffer_map_[output_buffer_index];
  DVLOGF(4) << "picture_id=" << output_record.picture_id;
  DCHECK_EQ(output_record.state, kAtProcessor);
  DCHECK_NE(output_record.picture_id, -1);

  // Send the processed frame to render.
  output_record.state = kAtClient;
  decoder_frames_at_client_++;
  image_processor_bitstream_buffer_ids_.pop();
  // TODO(hubbe): Insert correct color space. http://crbug.com/647725
  const Picture picture(output_record.picture_id, bitstream_buffer_id,
                        gfx::Rect(visible_size_), gfx::ColorSpace(), false);
  // The picture is queued with the record's pre-send |cleared| flag, then the
  // record is marked cleared for subsequent uses of this buffer.
  pending_picture_ready_.push(PictureRecord(output_record.cleared, picture));
  SendPictureReady();
  output_record.cleared = true;
  // Flush or resolution change may be waiting image processor to finish.
  if (image_processor_bitstream_buffer_ids_.empty()) {
    NotifyFlushDoneIfNeeded();
    if (decoder_state_ == kChangingResolution)
      StartResolutionChange();
  }
}
2642 2643
// Error callback bound into |image_processor_| at initialization; escalates
// any processor failure to a decoder-level platform error.
void V4L2VideoDecodeAccelerator::ImageProcessorError() {
  LOGF(ERROR) << "Image processor error";
  NOTIFY_ERROR(PLATFORM_FAILURE);
}
2647 2648
2648 } // namespace media 2649 } // namespace media
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698