OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <errno.h> | 5 #include <errno.h> |
6 #include <fcntl.h> | 6 #include <fcntl.h> |
7 #include <linux/videodev2.h> | 7 #include <linux/videodev2.h> |
8 #include <poll.h> | 8 #include <poll.h> |
9 #include <string.h> | 9 #include <string.h> |
10 #include <sys/eventfd.h> | 10 #include <sys/eventfd.h> |
11 #include <sys/ioctl.h> | 11 #include <sys/ioctl.h> |
12 #include <sys/mman.h> | 12 #include <sys/mman.h> |
13 | 13 |
14 #include "base/bind.h" | 14 #include "base/bind.h" |
15 #include "base/bind_helpers.h" | 15 #include "base/bind_helpers.h" |
16 #include "base/callback.h" | 16 #include "base/callback.h" |
17 #include "base/callback_helpers.h" | 17 #include "base/callback_helpers.h" |
18 #include "base/command_line.h" | 18 #include "base/command_line.h" |
19 #include "base/macros.h" | 19 #include "base/macros.h" |
20 #include "base/numerics/safe_conversions.h" | 20 #include "base/numerics/safe_conversions.h" |
21 #include "base/strings/stringprintf.h" | 21 #include "base/strings/stringprintf.h" |
22 #include "content/common/gpu/media/shared_memory_region.h" | 22 #include "content/common/gpu/media/shared_memory_region.h" |
23 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h" | 23 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h" |
24 #include "media/base/bind_to_current_loop.h" | 24 #include "media/base/bind_to_current_loop.h" |
25 #include "media/base/media_switches.h" | 25 #include "media/base/media_switches.h" |
26 #include "ui/gl/gl_context.h" | |
27 #include "ui/gl/scoped_binders.h" | 26 #include "ui/gl/scoped_binders.h" |
28 | 27 |
29 #define LOGF(level) LOG(level) << __FUNCTION__ << "(): " | 28 #define LOGF(level) LOG(level) << __FUNCTION__ << "(): " |
30 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): " | 29 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): " |
31 | 30 |
32 #define NOTIFY_ERROR(x) \ | 31 #define NOTIFY_ERROR(x) \ |
33 do { \ | 32 do { \ |
34     LOG(ERROR) << "Setting error state: " << x;                        \ | 33     LOG(ERROR) << "Setting error state: " << x;                     \
35 SetErrorState(x); \ | 34 SetErrorState(x); \ |
36 } while (0) | 35 } while (0) |
(...skipping 336 matching lines...) |
373 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface) | 372 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface) |
374 : dec_surface_(dec_surface) { | 373 : dec_surface_(dec_surface) { |
375 } | 374 } |
376 | 375 |
377 V4L2VP8Picture::~V4L2VP8Picture() { | 376 V4L2VP8Picture::~V4L2VP8Picture() { |
378 } | 377 } |
379 | 378 |
380 V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator( | 379 V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator( |
381 const scoped_refptr<V4L2Device>& device, | 380 const scoped_refptr<V4L2Device>& device, |
382 EGLDisplay egl_display, | 381 EGLDisplay egl_display, |
383 const GetGLContextCallback& get_gl_context_cb, | 382 EGLContext egl_context, |
384 const MakeGLContextCurrentCallback& make_context_current_cb) | 383 const base::WeakPtr<Client>& io_client, |
| 384 const base::Callback<bool(void)>& make_context_current, |
| 385 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner) |
385 : input_planes_count_(0), | 386 : input_planes_count_(0), |
386 output_planes_count_(0), | 387 output_planes_count_(0), |
387 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), | 388 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), |
| 389 io_task_runner_(io_task_runner), |
| 390 io_client_(io_client), |
388 device_(device), | 391 device_(device), |
389 decoder_thread_("V4L2SliceVideoDecodeAcceleratorThread"), | 392 decoder_thread_("V4L2SliceVideoDecodeAcceleratorThread"), |
390 device_poll_thread_("V4L2SliceVideoDecodeAcceleratorDevicePollThread"), | 393 device_poll_thread_("V4L2SliceVideoDecodeAcceleratorDevicePollThread"), |
391 input_streamon_(false), | 394 input_streamon_(false), |
392 input_buffer_queued_count_(0), | 395 input_buffer_queued_count_(0), |
393 output_streamon_(false), | 396 output_streamon_(false), |
394 output_buffer_queued_count_(0), | 397 output_buffer_queued_count_(0), |
395 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN), | 398 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN), |
396 output_format_fourcc_(0), | 399 output_format_fourcc_(0), |
397 state_(kUninitialized), | 400 state_(kUninitialized), |
398 decoder_flushing_(false), | 401 decoder_flushing_(false), |
399 decoder_resetting_(false), | 402 decoder_resetting_(false), |
400 surface_set_change_pending_(false), | 403 surface_set_change_pending_(false), |
401 picture_clearing_count_(0), | 404 picture_clearing_count_(0), |
| 405 make_context_current_(make_context_current), |
402 egl_display_(egl_display), | 406 egl_display_(egl_display), |
403 get_gl_context_cb_(get_gl_context_cb), | 407 egl_context_(egl_context), |
404 make_context_current_cb_(make_context_current_cb), | |
405 weak_this_factory_(this) { | 408 weak_this_factory_(this) { |
406 weak_this_ = weak_this_factory_.GetWeakPtr(); | 409 weak_this_ = weak_this_factory_.GetWeakPtr(); |
407 } | 410 } |
408 | 411 |
409 V4L2SliceVideoDecodeAccelerator::~V4L2SliceVideoDecodeAccelerator() { | 412 V4L2SliceVideoDecodeAccelerator::~V4L2SliceVideoDecodeAccelerator() { |
410 DVLOGF(2); | 413 DVLOGF(2); |
411 | 414 |
412 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 415 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
413 DCHECK(!decoder_thread_.IsRunning()); | 416 DCHECK(!decoder_thread_.IsRunning()); |
414 DCHECK(!device_poll_thread_.IsRunning()); | 417 DCHECK(!device_poll_thread_.IsRunning()); |
(...skipping 15 matching lines...) |
430 client_ptr_factory_.reset(); | 433 client_ptr_factory_.reset(); |
431 } | 434 } |
432 } | 435 } |
433 | 436 |
434 bool V4L2SliceVideoDecodeAccelerator::Initialize(const Config& config, | 437 bool V4L2SliceVideoDecodeAccelerator::Initialize(const Config& config, |
435 Client* client) { | 438 Client* client) { |
436 DVLOGF(3) << "profile: " << config.profile; | 439 DVLOGF(3) << "profile: " << config.profile; |
437 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 440 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
438 DCHECK_EQ(state_, kUninitialized); | 441 DCHECK_EQ(state_, kUninitialized); |
439 | 442 |
440 if (get_gl_context_cb_.is_null() || make_context_current_cb_.is_null()) { | |
441 NOTREACHED() << "GL callbacks are required for this VDA"; | |
442 return false; | |
443 } | |
444 | |
445 if (config.is_encrypted) { | 443 if (config.is_encrypted) { |
446 NOTREACHED() << "Encrypted streams are not supported for this VDA"; | 444 NOTREACHED() << "Encrypted streams are not supported for this VDA"; |
447 return false; | 445 return false; |
448 } | 446 } |
449 | 447 |
450 if (!device_->SupportsDecodeProfileForV4L2PixelFormats( | 448 if (!device_->SupportsDecodeProfileForV4L2PixelFormats( |
451 config.profile, arraysize(supported_input_fourccs_), | 449 config.profile, arraysize(supported_input_fourccs_), |
452 supported_input_fourccs_)) { | 450 supported_input_fourccs_)) { |
453 DVLOGF(1) << "unsupported profile " << config.profile; | 451 DVLOGF(1) << "unsupported profile " << config.profile; |
454 return false; | 452 return false; |
455 } | 453 } |
456 | 454 |
457 client_ptr_factory_.reset( | 455 client_ptr_factory_.reset( |
458 new base::WeakPtrFactory<VideoDecodeAccelerator::Client>(client)); | 456 new base::WeakPtrFactory<VideoDecodeAccelerator::Client>(client)); |
459 client_ = client_ptr_factory_->GetWeakPtr(); | 457 client_ = client_ptr_factory_->GetWeakPtr(); |
460 // If we haven't been set up to decode on separate thread via | |
461 // TryToSetupDecodeOnSeparateThread(), use the main thread/client for | |
462 // decode tasks. | |
463 if (!decode_task_runner_) { | |
464 decode_task_runner_ = child_task_runner_; | |
465 DCHECK(!decode_client_); | |
466 decode_client_ = client_; | |
467 } | |
468 | 458 |
469 video_profile_ = config.profile; | 459 video_profile_ = config.profile; |
470 | 460 |
471 if (video_profile_ >= media::H264PROFILE_MIN && | 461 if (video_profile_ >= media::H264PROFILE_MIN && |
472 video_profile_ <= media::H264PROFILE_MAX) { | 462 video_profile_ <= media::H264PROFILE_MAX) { |
473 h264_accelerator_.reset(new V4L2H264Accelerator(this)); | 463 h264_accelerator_.reset(new V4L2H264Accelerator(this)); |
474 decoder_.reset(new H264Decoder(h264_accelerator_.get())); | 464 decoder_.reset(new H264Decoder(h264_accelerator_.get())); |
475 } else if (video_profile_ >= media::VP8PROFILE_MIN && | 465 } else if (video_profile_ >= media::VP8PROFILE_MIN && |
476 video_profile_ <= media::VP8PROFILE_MAX) { | 466 video_profile_ <= media::VP8PROFILE_MAX) { |
477 vp8_accelerator_.reset(new V4L2VP8Accelerator(this)); | 467 vp8_accelerator_.reset(new V4L2VP8Accelerator(this)); |
478 decoder_.reset(new VP8Decoder(vp8_accelerator_.get())); | 468 decoder_.reset(new VP8Decoder(vp8_accelerator_.get())); |
479 } else { | 469 } else { |
480 NOTREACHED() << "Unsupported profile " << video_profile_; | 470 NOTREACHED() << "Unsupported profile " << video_profile_; |
481 return false; | 471 return false; |
482 } | 472 } |
483 | 473 |
484 // TODO(posciak): This needs to be queried once supported. | 474 // TODO(posciak): This needs to be queried once supported. |
485 input_planes_count_ = 1; | 475 input_planes_count_ = 1; |
486 output_planes_count_ = 1; | 476 output_planes_count_ = 1; |
487 | 477 |
488 if (egl_display_ == EGL_NO_DISPLAY) { | 478 if (egl_display_ == EGL_NO_DISPLAY) { |
489 LOG(ERROR) << "Initialize(): could not get EGLDisplay"; | 479 LOG(ERROR) << "Initialize(): could not get EGLDisplay"; |
490 return false; | 480 return false; |
491 } | 481 } |
492 | 482 |
493 // We need the context to be initialized to query extensions. | 483 // We need the context to be initialized to query extensions. |
494 if (!make_context_current_cb_.Run()) { | 484 if (!make_context_current_.Run()) { |
495 LOG(ERROR) << "Initialize(): could not make context current"; | 485 LOG(ERROR) << "Initialize(): could not make context current"; |
496 return false; | 486 return false; |
497 } | 487 } |
498 | 488 |
499 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) { | 489 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) { |
500 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync"; | 490 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync"; |
501 return false; | 491 return false; |
502 } | 492 } |
503 | 493 |
504 // Capabilities check. | 494 // Capabilities check. |
(...skipping 683 matching lines...) |
1188 decoder_display_queue_.pop(); | 1178 decoder_display_queue_.pop(); |
1189 | 1179 |
1190 DVLOGF(3) << "Device poll stopped"; | 1180 DVLOGF(3) << "Device poll stopped"; |
1191 return true; | 1181 return true; |
1192 } | 1182 } |
1193 | 1183 |
1194 void V4L2SliceVideoDecodeAccelerator::Decode( | 1184 void V4L2SliceVideoDecodeAccelerator::Decode( |
1195 const media::BitstreamBuffer& bitstream_buffer) { | 1185 const media::BitstreamBuffer& bitstream_buffer) { |
1196 DVLOGF(3) << "input_id=" << bitstream_buffer.id() | 1186 DVLOGF(3) << "input_id=" << bitstream_buffer.id() |
1197 << ", size=" << bitstream_buffer.size(); | 1187 << ", size=" << bitstream_buffer.size(); |
1198 DCHECK(decode_task_runner_->BelongsToCurrentThread()); | 1188 DCHECK(io_task_runner_->BelongsToCurrentThread()); |
1199 | 1189 |
1200 if (bitstream_buffer.id() < 0) { | 1190 if (bitstream_buffer.id() < 0) { |
1201 LOG(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id(); | 1191 LOG(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id(); |
1202 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) | 1192 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) |
1203 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); | 1193 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); |
1204 NOTIFY_ERROR(INVALID_ARGUMENT); | 1194 NOTIFY_ERROR(INVALID_ARGUMENT); |
1205 return; | 1195 return; |
1206 } | 1196 } |
1207 | 1197 |
1208 decoder_thread_task_runner_->PostTask( | 1198 decoder_thread_task_runner_->PostTask( |
1209 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeTask, | 1199 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeTask, |
1210 base::Unretained(this), bitstream_buffer)); | 1200 base::Unretained(this), bitstream_buffer)); |
1211 } | 1201 } |
1212 | 1202 |
1213 void V4L2SliceVideoDecodeAccelerator::DecodeTask( | 1203 void V4L2SliceVideoDecodeAccelerator::DecodeTask( |
1214 const media::BitstreamBuffer& bitstream_buffer) { | 1204 const media::BitstreamBuffer& bitstream_buffer) { |
1215 DVLOGF(3) << "input_id=" << bitstream_buffer.id() | 1205 DVLOGF(3) << "input_id=" << bitstream_buffer.id() |
1216 << " size=" << bitstream_buffer.size(); | 1206 << " size=" << bitstream_buffer.size(); |
1217 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1207 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
1218 | 1208 |
1219 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef( | 1209 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef( |
1220 decode_client_, decode_task_runner_, | 1210 io_client_, io_task_runner_, |
1221 new SharedMemoryRegion(bitstream_buffer, true), bitstream_buffer.id())); | 1211 new SharedMemoryRegion(bitstream_buffer, true), bitstream_buffer.id())); |
1222 if (!bitstream_record->shm->Map()) { | 1212 if (!bitstream_record->shm->Map()) { |
1223 LOGF(ERROR) << "Could not map bitstream_buffer"; | 1213 LOGF(ERROR) << "Could not map bitstream_buffer"; |
1224 NOTIFY_ERROR(UNREADABLE_INPUT); | 1214 NOTIFY_ERROR(UNREADABLE_INPUT); |
1225 return; | 1215 return; |
1226 } | 1216 } |
1227 DVLOGF(3) << "mapped at=" << bitstream_record->shm->memory(); | 1217 DVLOGF(3) << "mapped at=" << bitstream_record->shm->memory(); |
1228 | 1218 |
1229 decoder_input_queue_.push( | 1219 decoder_input_queue_.push( |
1230 linked_ptr<BitstreamBufferRef>(bitstream_record.release())); | 1220 linked_ptr<BitstreamBufferRef>(bitstream_record.release())); |
(...skipping 260 matching lines...) |
1491 output_planes_count_)); | 1481 output_planes_count_)); |
1492 } | 1482 } |
1493 | 1483 |
1494 void V4L2SliceVideoDecodeAccelerator::CreateEGLImages( | 1484 void V4L2SliceVideoDecodeAccelerator::CreateEGLImages( |
1495 const std::vector<media::PictureBuffer>& buffers, | 1485 const std::vector<media::PictureBuffer>& buffers, |
1496 uint32_t output_format_fourcc, | 1486 uint32_t output_format_fourcc, |
1497 size_t output_planes_count) { | 1487 size_t output_planes_count) { |
1498 DVLOGF(3); | 1488 DVLOGF(3); |
1499 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 1489 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
1500 | 1490 |
1501 gfx::GLContext* gl_context = get_gl_context_cb_.Run(); | 1491 if (!make_context_current_.Run()) { |
1502 if (!gl_context || !make_context_current_cb_.Run()) { | 1492 DLOG(ERROR) << "could not make context current"; |
1503 DLOG(ERROR) << "No GL context"; | |
1504 NOTIFY_ERROR(PLATFORM_FAILURE); | 1493 NOTIFY_ERROR(PLATFORM_FAILURE); |
1505 return; | 1494 return; |
1506 } | 1495 } |
1507 | 1496 |
1508 gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0); | 1497 gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0); |
1509 | 1498 |
1510 std::vector<EGLImageKHR> egl_images; | 1499 std::vector<EGLImageKHR> egl_images; |
1511 for (size_t i = 0; i < buffers.size(); ++i) { | 1500 for (size_t i = 0; i < buffers.size(); ++i) { |
1512 EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_, | 1501 EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_, |
1513 gl_context->GetHandle(), | 1502 egl_context_, |
1514 buffers[i].texture_id(), | 1503 buffers[i].texture_id(), |
1515 buffers[i].size(), | 1504 buffers[i].size(), |
1516 i, | 1505 i, |
1517 output_format_fourcc, | 1506 output_format_fourcc, |
1518 output_planes_count); | 1507 output_planes_count); |
1519 if (egl_image == EGL_NO_IMAGE_KHR) { | 1508 if (egl_image == EGL_NO_IMAGE_KHR) { |
1520 LOGF(ERROR) << "Could not create EGLImageKHR"; | 1509 LOGF(ERROR) << "Could not create EGLImageKHR"; |
1521 for (const auto& image_to_destroy : egl_images) | 1510 for (const auto& image_to_destroy : egl_images) |
1522 device_->DestroyEGLImage(egl_display_, image_to_destroy); | 1511 device_->DestroyEGLImage(egl_display_, image_to_destroy); |
1523 | 1512 |
(...skipping 45 matching lines...) |
1569 } | 1558 } |
1570 | 1559 |
1571 ProcessPendingEventsIfNeeded(); | 1560 ProcessPendingEventsIfNeeded(); |
1572 } | 1561 } |
1573 | 1562 |
1574 void V4L2SliceVideoDecodeAccelerator::ReusePictureBuffer( | 1563 void V4L2SliceVideoDecodeAccelerator::ReusePictureBuffer( |
1575 int32_t picture_buffer_id) { | 1564 int32_t picture_buffer_id) { |
1576 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 1565 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
1577 DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id; | 1566 DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id; |
1578 | 1567 |
1579 if (!make_context_current_cb_.Run()) { | 1568 if (!make_context_current_.Run()) { |
1580 LOGF(ERROR) << "could not make context current"; | 1569 LOGF(ERROR) << "could not make context current"; |
1581 NOTIFY_ERROR(PLATFORM_FAILURE); | 1570 NOTIFY_ERROR(PLATFORM_FAILURE); |
1582 return; | 1571 return; |
1583 } | 1572 } |
1584 | 1573 |
1585 EGLSyncKHR egl_sync = | 1574 EGLSyncKHR egl_sync = |
1586 eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL); | 1575 eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL); |
1587 if (egl_sync == EGL_NO_SYNC_KHR) { | 1576 if (egl_sync == EGL_NO_SYNC_KHR) { |
1588 LOGF(ERROR) << "eglCreateSyncKHR() failed"; | 1577 LOGF(ERROR) << "eglCreateSyncKHR() failed"; |
1589 NOTIFY_ERROR(PLATFORM_FAILURE); | 1578 NOTIFY_ERROR(PLATFORM_FAILURE); |
(...skipping 55 matching lines...) |
1645 | 1634 |
1646 void V4L2SliceVideoDecodeAccelerator::FlushTask() { | 1635 void V4L2SliceVideoDecodeAccelerator::FlushTask() { |
1647 DVLOGF(3); | 1636 DVLOGF(3); |
1648 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1637 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
1649 | 1638 |
1650 if (!decoder_input_queue_.empty()) { | 1639 if (!decoder_input_queue_.empty()) { |
1651 // We are not done with pending inputs, so queue an empty buffer, | 1640 // We are not done with pending inputs, so queue an empty buffer, |
1652 // which - when reached - will trigger flush sequence. | 1641 // which - when reached - will trigger flush sequence. |
1653 decoder_input_queue_.push( | 1642 decoder_input_queue_.push( |
1654 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef( | 1643 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef( |
1655 decode_client_, decode_task_runner_, nullptr, kFlushBufferId))); | 1644 io_client_, io_task_runner_, nullptr, kFlushBufferId))); |
1656 return; | 1645 return; |
1657 } | 1646 } |
1658 | 1647 |
1659 // No more inputs pending, so just finish flushing here. | 1648 // No more inputs pending, so just finish flushing here. |
1660 InitiateFlush(); | 1649 InitiateFlush(); |
1661 } | 1650 } |
1662 | 1651 |
1663 void V4L2SliceVideoDecodeAccelerator::InitiateFlush() { | 1652 void V4L2SliceVideoDecodeAccelerator::InitiateFlush() { |
1664 DVLOGF(3); | 1653 DVLOGF(3); |
1665 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1654 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
(...skipping 890 matching lines...) |
2556 } | 2545 } |
2557 | 2546 |
2558 void V4L2SliceVideoDecodeAccelerator::SendPictureReady() { | 2547 void V4L2SliceVideoDecodeAccelerator::SendPictureReady() { |
2559 DVLOGF(3); | 2548 DVLOGF(3); |
2560 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2549 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2561 bool resetting_or_flushing = (decoder_resetting_ || decoder_flushing_); | 2550 bool resetting_or_flushing = (decoder_resetting_ || decoder_flushing_); |
2562 while (!pending_picture_ready_.empty()) { | 2551 while (!pending_picture_ready_.empty()) { |
2563 bool cleared = pending_picture_ready_.front().cleared; | 2552 bool cleared = pending_picture_ready_.front().cleared; |
2564 const media::Picture& picture = pending_picture_ready_.front().picture; | 2553 const media::Picture& picture = pending_picture_ready_.front().picture; |
2565 if (cleared && picture_clearing_count_ == 0) { | 2554 if (cleared && picture_clearing_count_ == 0) { |
2566 DVLOGF(4) << "Posting picture ready to decode task runner for: " | 2555 DVLOGF(4) << "Posting picture ready to IO for: " |
2567 << picture.picture_buffer_id(); | 2556 << picture.picture_buffer_id(); |
2568 // This picture is cleared. It can be posted to a thread different than | 2557 // This picture is cleared. Post it to IO thread to reduce latency. This |
2569 // the main GPU thread to reduce latency. This should be the case after | 2558 // should be the case after all pictures are cleared at the beginning. |
2570 // all pictures are cleared at the beginning. | 2559 io_task_runner_->PostTask( |
2571 decode_task_runner_->PostTask( | 2560 FROM_HERE, base::Bind(&Client::PictureReady, io_client_, picture)); |
2572 FROM_HERE, | |
2573 base::Bind(&Client::PictureReady, decode_client_, picture)); | |
2574 pending_picture_ready_.pop(); | 2561 pending_picture_ready_.pop(); |
2575 } else if (!cleared || resetting_or_flushing) { | 2562 } else if (!cleared || resetting_or_flushing) { |
2576 DVLOGF(3) << "cleared=" << pending_picture_ready_.front().cleared | 2563 DVLOGF(3) << "cleared=" << pending_picture_ready_.front().cleared |
2577 << ", decoder_resetting_=" << decoder_resetting_ | 2564 << ", decoder_resetting_=" << decoder_resetting_ |
2578 << ", decoder_flushing_=" << decoder_flushing_ | 2565 << ", decoder_flushing_=" << decoder_flushing_ |
2579 << ", picture_clearing_count_=" << picture_clearing_count_; | 2566 << ", picture_clearing_count_=" << picture_clearing_count_; |
2580 DVLOGF(4) << "Posting picture ready to GPU for: " | 2567 DVLOGF(4) << "Posting picture ready to GPU for: " |
2581 << picture.picture_buffer_id(); | 2568 << picture.picture_buffer_id(); |
2582 // If the picture is not cleared, post it to the child thread because it | 2569 // If the picture is not cleared, post it to the child thread because it |
2583 // has to be cleared in the child thread. A picture only needs to be | 2570 // has to be cleared in the child thread. A picture only needs to be |
(...skipping 17 matching lines...) |
2601 } | 2588 } |
2602 | 2589 |
2603 void V4L2SliceVideoDecodeAccelerator::PictureCleared() { | 2590 void V4L2SliceVideoDecodeAccelerator::PictureCleared() { |
2604 DVLOGF(3) << "clearing count=" << picture_clearing_count_; | 2591 DVLOGF(3) << "clearing count=" << picture_clearing_count_; |
2605 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2592 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2606 DCHECK_GT(picture_clearing_count_, 0); | 2593 DCHECK_GT(picture_clearing_count_, 0); |
2607 picture_clearing_count_--; | 2594 picture_clearing_count_--; |
2608 SendPictureReady(); | 2595 SendPictureReady(); |
2609 } | 2596 } |
2610 | 2597 |
2611 bool V4L2SliceVideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread( | 2598 bool V4L2SliceVideoDecodeAccelerator::CanDecodeOnIOThread() { |
2612 const base::WeakPtr<Client>& decode_client, | |
2613 const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) { | |
2614 decode_client_ = decode_client_; | |
2615 decode_task_runner_ = decode_task_runner; | |
2616 return true; | 2599 return true; |
2617 } | 2600 } |
2618 | 2601 |
2619 // static | 2602 // static |
2620 media::VideoDecodeAccelerator::SupportedProfiles | 2603 media::VideoDecodeAccelerator::SupportedProfiles |
2621 V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles() { | 2604 V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles() { |
2622 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder); | 2605 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder); |
2623 if (!device) | 2606 if (!device) |
2624 return SupportedProfiles(); | 2607 return SupportedProfiles(); |
2625 | 2608 |
2626 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_), | 2609 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_), |
2627 supported_input_fourccs_); | 2610 supported_input_fourccs_); |
2628 } | 2611 } |
2629 | 2612 |
2630 } // namespace content | 2613 } // namespace content |