OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <errno.h> | 5 #include <errno.h> |
6 #include <fcntl.h> | 6 #include <fcntl.h> |
7 #include <linux/videodev2.h> | 7 #include <linux/videodev2.h> |
8 #include <poll.h> | 8 #include <poll.h> |
9 #include <string.h> | 9 #include <string.h> |
10 #include <sys/eventfd.h> | 10 #include <sys/eventfd.h> |
11 #include <sys/ioctl.h> | 11 #include <sys/ioctl.h> |
12 #include <sys/mman.h> | 12 #include <sys/mman.h> |
13 | 13 |
14 #include "base/bind.h" | 14 #include "base/bind.h" |
15 #include "base/bind_helpers.h" | 15 #include "base/bind_helpers.h" |
16 #include "base/callback.h" | 16 #include "base/callback.h" |
17 #include "base/callback_helpers.h" | 17 #include "base/callback_helpers.h" |
18 #include "base/command_line.h" | 18 #include "base/command_line.h" |
19 #include "base/macros.h" | 19 #include "base/macros.h" |
20 #include "base/numerics/safe_conversions.h" | 20 #include "base/numerics/safe_conversions.h" |
21 #include "base/strings/stringprintf.h" | 21 #include "base/strings/stringprintf.h" |
22 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h" | 22 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h" |
| 23 #include "content/public/common/gpu_video_decode_accelerator_helpers.h" |
23 #include "media/base/bind_to_current_loop.h" | 24 #include "media/base/bind_to_current_loop.h" |
24 #include "media/base/media_switches.h" | 25 #include "media/base/media_switches.h" |
| 26 #include "ui/gl/gl_context.h" |
25 #include "ui/gl/scoped_binders.h" | 27 #include "ui/gl/scoped_binders.h" |
26 | 28 |
27 #define LOGF(level) LOG(level) << __FUNCTION__ << "(): " | 29 #define LOGF(level) LOG(level) << __FUNCTION__ << "(): " |
28 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): " | 30 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): " |
29 | 31 |
30 #define NOTIFY_ERROR(x) \ | 32 #define NOTIFY_ERROR(x) \ |
31 do { \ | 33 do { \ |
32 LOG(ERROR) << "Setting error state:" << x; \ | 34 LOG(ERROR) << "Setting error state:" << x; \ |
33 SetErrorState(x); \ | 35 SetErrorState(x); \ |
34 } while (0) | 36 } while (0) |
(...skipping 340 matching lines...) |
375 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface) | 377 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface) |
376 : dec_surface_(dec_surface) { | 378 : dec_surface_(dec_surface) { |
377 } | 379 } |
378 | 380 |
379 V4L2VP8Picture::~V4L2VP8Picture() { | 381 V4L2VP8Picture::~V4L2VP8Picture() { |
380 } | 382 } |
381 | 383 |
382 V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator( | 384 V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator( |
383 const scoped_refptr<V4L2Device>& device, | 385 const scoped_refptr<V4L2Device>& device, |
384 EGLDisplay egl_display, | 386 EGLDisplay egl_display, |
385 EGLContext egl_context, | 387 const gpu_vda_helpers::GetGLContextCb& get_gl_context_cb, |
386 const base::WeakPtr<Client>& io_client, | 388 const gpu_vda_helpers::MakeGLContextCurrentCb& make_context_current_cb) |
387 const base::Callback<bool(void)>& make_context_current, | |
388 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner) | |
389 : input_planes_count_(0), | 389 : input_planes_count_(0), |
390 output_planes_count_(0), | 390 output_planes_count_(0), |
391 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), | 391 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), |
392 io_task_runner_(io_task_runner), | |
393 io_client_(io_client), | |
394 device_(device), | 392 device_(device), |
395 decoder_thread_("V4L2SliceVideoDecodeAcceleratorThread"), | 393 decoder_thread_("V4L2SliceVideoDecodeAcceleratorThread"), |
396 device_poll_thread_("V4L2SliceVideoDecodeAcceleratorDevicePollThread"), | 394 device_poll_thread_("V4L2SliceVideoDecodeAcceleratorDevicePollThread"), |
397 input_streamon_(false), | 395 input_streamon_(false), |
398 input_buffer_queued_count_(0), | 396 input_buffer_queued_count_(0), |
399 output_streamon_(false), | 397 output_streamon_(false), |
400 output_buffer_queued_count_(0), | 398 output_buffer_queued_count_(0), |
401 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN), | 399 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN), |
402 output_format_fourcc_(0), | 400 output_format_fourcc_(0), |
403 state_(kUninitialized), | 401 state_(kUninitialized), |
404 decoder_flushing_(false), | 402 decoder_flushing_(false), |
405 decoder_resetting_(false), | 403 decoder_resetting_(false), |
406 surface_set_change_pending_(false), | 404 surface_set_change_pending_(false), |
407 picture_clearing_count_(0), | 405 picture_clearing_count_(0), |
408 make_context_current_(make_context_current), | |
409 egl_display_(egl_display), | 406 egl_display_(egl_display), |
410 egl_context_(egl_context), | 407 get_gl_context_cb_(get_gl_context_cb), |
| 408 make_context_current_cb_(make_context_current_cb), |
411 weak_this_factory_(this) { | 409 weak_this_factory_(this) { |
412 weak_this_ = weak_this_factory_.GetWeakPtr(); | 410 weak_this_ = weak_this_factory_.GetWeakPtr(); |
413 } | 411 } |
414 | 412 |
415 V4L2SliceVideoDecodeAccelerator::~V4L2SliceVideoDecodeAccelerator() { | 413 V4L2SliceVideoDecodeAccelerator::~V4L2SliceVideoDecodeAccelerator() { |
416 DVLOGF(2); | 414 DVLOGF(2); |
417 | 415 |
418 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 416 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
419 DCHECK(!decoder_thread_.IsRunning()); | 417 DCHECK(!decoder_thread_.IsRunning()); |
420 DCHECK(!device_poll_thread_.IsRunning()); | 418 DCHECK(!device_poll_thread_.IsRunning()); |
(...skipping 15 matching lines...) |
436 client_ptr_factory_.reset(); | 434 client_ptr_factory_.reset(); |
437 } | 435 } |
438 } | 436 } |
439 | 437 |
440 bool V4L2SliceVideoDecodeAccelerator::Initialize(const Config& config, | 438 bool V4L2SliceVideoDecodeAccelerator::Initialize(const Config& config, |
441 Client* client) { | 439 Client* client) { |
442 DVLOGF(3) << "profile: " << config.profile; | 440 DVLOGF(3) << "profile: " << config.profile; |
443 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 441 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
444 DCHECK_EQ(state_, kUninitialized); | 442 DCHECK_EQ(state_, kUninitialized); |
445 | 443 |
| 444 if (get_gl_context_cb_.is_null() || make_context_current_cb_.is_null()) { |
| 445 NOTREACHED() << "GL callbacks are required for this VDA"; |
| 446 return false; |
| 447 } |
| 448 |
446 if (config.is_encrypted) { | 449 if (config.is_encrypted) { |
447 NOTREACHED() << "Encrypted streams are not supported for this VDA"; | 450 NOTREACHED() << "Encrypted streams are not supported for this VDA"; |
448 return false; | 451 return false; |
449 } | 452 } |
450 | 453 |
451 if (!device_->SupportsDecodeProfileForV4L2PixelFormats( | 454 if (!device_->SupportsDecodeProfileForV4L2PixelFormats( |
452 config.profile, arraysize(supported_input_fourccs_), | 455 config.profile, arraysize(supported_input_fourccs_), |
453 supported_input_fourccs_)) { | 456 supported_input_fourccs_)) { |
454 DVLOGF(1) << "unsupported profile " << config.profile; | 457 DVLOGF(1) << "unsupported profile " << config.profile; |
455 return false; | 458 return false; |
456 } | 459 } |
457 | 460 |
458 client_ptr_factory_.reset( | 461 client_ptr_factory_.reset( |
459 new base::WeakPtrFactory<VideoDecodeAccelerator::Client>(client)); | 462 new base::WeakPtrFactory<VideoDecodeAccelerator::Client>(client)); |
460 client_ = client_ptr_factory_->GetWeakPtr(); | 463 client_ = client_ptr_factory_->GetWeakPtr(); |
| 464   // If we haven't been set up to decode on a separate thread via |
| 465 // TryInitializeDecodeOnSeparateThread(), use the main thread/client for |
| 466 // decode tasks. |
| 467 if (!decode_task_runner_) { |
| 468 decode_task_runner_ = child_task_runner_; |
| 469 DCHECK(!decode_client_); |
| 470 decode_client_ = client_; |
| 471 } |
461 | 472 |
462 video_profile_ = config.profile; | 473 video_profile_ = config.profile; |
463 | 474 |
464 if (video_profile_ >= media::H264PROFILE_MIN && | 475 if (video_profile_ >= media::H264PROFILE_MIN && |
465 video_profile_ <= media::H264PROFILE_MAX) { | 476 video_profile_ <= media::H264PROFILE_MAX) { |
466 h264_accelerator_.reset(new V4L2H264Accelerator(this)); | 477 h264_accelerator_.reset(new V4L2H264Accelerator(this)); |
467 decoder_.reset(new H264Decoder(h264_accelerator_.get())); | 478 decoder_.reset(new H264Decoder(h264_accelerator_.get())); |
468 } else if (video_profile_ >= media::VP8PROFILE_MIN && | 479 } else if (video_profile_ >= media::VP8PROFILE_MIN && |
469 video_profile_ <= media::VP8PROFILE_MAX) { | 480 video_profile_ <= media::VP8PROFILE_MAX) { |
470 vp8_accelerator_.reset(new V4L2VP8Accelerator(this)); | 481 vp8_accelerator_.reset(new V4L2VP8Accelerator(this)); |
471 decoder_.reset(new VP8Decoder(vp8_accelerator_.get())); | 482 decoder_.reset(new VP8Decoder(vp8_accelerator_.get())); |
472 } else { | 483 } else { |
473 NOTREACHED() << "Unsupported profile " << video_profile_; | 484 NOTREACHED() << "Unsupported profile " << video_profile_; |
474 return false; | 485 return false; |
475 } | 486 } |
476 | 487 |
477 // TODO(posciak): This needs to be queried once supported. | 488 // TODO(posciak): This needs to be queried once supported. |
478 input_planes_count_ = 1; | 489 input_planes_count_ = 1; |
479 output_planes_count_ = 1; | 490 output_planes_count_ = 1; |
480 | 491 |
481 if (egl_display_ == EGL_NO_DISPLAY) { | 492 if (egl_display_ == EGL_NO_DISPLAY) { |
482 LOG(ERROR) << "Initialize(): could not get EGLDisplay"; | 493 LOG(ERROR) << "Initialize(): could not get EGLDisplay"; |
483 return false; | 494 return false; |
484 } | 495 } |
485 | 496 |
486 // We need the context to be initialized to query extensions. | 497 // We need the context to be initialized to query extensions. |
487 if (!make_context_current_.Run()) { | 498 if (!make_context_current_cb_.Run()) { |
488 LOG(ERROR) << "Initialize(): could not make context current"; | 499 LOG(ERROR) << "Initialize(): could not make context current"; |
489 return false; | 500 return false; |
490 } | 501 } |
491 | 502 |
492 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) { | 503 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) { |
493 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync"; | 504 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync"; |
494 return false; | 505 return false; |
495 } | 506 } |
496 | 507 |
497 // Capabilities check. | 508 // Capabilities check. |
(...skipping 683 matching lines...) |
1181 decoder_display_queue_.pop(); | 1192 decoder_display_queue_.pop(); |
1182 | 1193 |
1183 DVLOGF(3) << "Device poll stopped"; | 1194 DVLOGF(3) << "Device poll stopped"; |
1184 return true; | 1195 return true; |
1185 } | 1196 } |
1186 | 1197 |
1187 void V4L2SliceVideoDecodeAccelerator::Decode( | 1198 void V4L2SliceVideoDecodeAccelerator::Decode( |
1188 const media::BitstreamBuffer& bitstream_buffer) { | 1199 const media::BitstreamBuffer& bitstream_buffer) { |
1189 DVLOGF(3) << "input_id=" << bitstream_buffer.id() | 1200 DVLOGF(3) << "input_id=" << bitstream_buffer.id() |
1190 << ", size=" << bitstream_buffer.size(); | 1201 << ", size=" << bitstream_buffer.size(); |
1191 DCHECK(io_task_runner_->BelongsToCurrentThread()); | 1202 DCHECK(decode_task_runner_->BelongsToCurrentThread()); |
1192 | 1203 |
1193 if (bitstream_buffer.id() < 0) { | 1204 if (bitstream_buffer.id() < 0) { |
1194 LOG(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id(); | 1205 LOG(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id(); |
1195 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) | 1206 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) |
1196 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); | 1207 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); |
1197 NOTIFY_ERROR(INVALID_ARGUMENT); | 1208 NOTIFY_ERROR(INVALID_ARGUMENT); |
1198 return; | 1209 return; |
1199 } | 1210 } |
1200 | 1211 |
1201 decoder_thread_task_runner_->PostTask( | 1212 decoder_thread_task_runner_->PostTask( |
1202 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeTask, | 1213 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeTask, |
1203 base::Unretained(this), bitstream_buffer)); | 1214 base::Unretained(this), bitstream_buffer)); |
1204 } | 1215 } |
1205 | 1216 |
1206 void V4L2SliceVideoDecodeAccelerator::DecodeTask( | 1217 void V4L2SliceVideoDecodeAccelerator::DecodeTask( |
1207 const media::BitstreamBuffer& bitstream_buffer) { | 1218 const media::BitstreamBuffer& bitstream_buffer) { |
1208 DVLOGF(3) << "input_id=" << bitstream_buffer.id() | 1219 DVLOGF(3) << "input_id=" << bitstream_buffer.id() |
1209 << " size=" << bitstream_buffer.size(); | 1220 << " size=" << bitstream_buffer.size(); |
1210 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1221 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
1211 | 1222 |
1212 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef( | 1223 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef( |
1213 io_client_, io_task_runner_, | 1224 decode_client_, decode_task_runner_, |
1214 new base::SharedMemory(bitstream_buffer.handle(), true), | 1225 new base::SharedMemory(bitstream_buffer.handle(), true), |
1215 bitstream_buffer.size(), bitstream_buffer.id())); | 1226 bitstream_buffer.size(), bitstream_buffer.id())); |
1216 if (!bitstream_record->shm->Map(bitstream_buffer.size())) { | 1227 if (!bitstream_record->shm->Map(bitstream_buffer.size())) { |
1217 LOGF(ERROR) << "Could not map bitstream_buffer"; | 1228 LOGF(ERROR) << "Could not map bitstream_buffer"; |
1218 NOTIFY_ERROR(UNREADABLE_INPUT); | 1229 NOTIFY_ERROR(UNREADABLE_INPUT); |
1219 return; | 1230 return; |
1220 } | 1231 } |
1221 DVLOGF(3) << "mapped at=" << bitstream_record->shm->memory(); | 1232 DVLOGF(3) << "mapped at=" << bitstream_record->shm->memory(); |
1222 | 1233 |
1223 decoder_input_queue_.push( | 1234 decoder_input_queue_.push( |
(...skipping 261 matching lines...) |
1485 output_planes_count_)); | 1496 output_planes_count_)); |
1486 } | 1497 } |
1487 | 1498 |
1488 void V4L2SliceVideoDecodeAccelerator::CreateEGLImages( | 1499 void V4L2SliceVideoDecodeAccelerator::CreateEGLImages( |
1489 const std::vector<media::PictureBuffer>& buffers, | 1500 const std::vector<media::PictureBuffer>& buffers, |
1490 uint32_t output_format_fourcc, | 1501 uint32_t output_format_fourcc, |
1491 size_t output_planes_count) { | 1502 size_t output_planes_count) { |
1492 DVLOGF(3); | 1503 DVLOGF(3); |
1493 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 1504 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
1494 | 1505 |
1495 if (!make_context_current_.Run()) { | 1506 gfx::GLContext* gl_context = get_gl_context_cb_.Run(); |
1496 DLOG(ERROR) << "could not make context current"; | 1507 if (!gl_context || !make_context_current_cb_.Run()) { |
| 1508     DLOG(ERROR) << "No GL context or could not make context current"; |
1497 NOTIFY_ERROR(PLATFORM_FAILURE); | 1509 NOTIFY_ERROR(PLATFORM_FAILURE); |
1498 return; | 1510 return; |
1499 } | 1511 } |
1500 | 1512 |
1501 gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0); | 1513 gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0); |
1502 | 1514 |
1503 std::vector<EGLImageKHR> egl_images; | 1515 std::vector<EGLImageKHR> egl_images; |
1504 for (size_t i = 0; i < buffers.size(); ++i) { | 1516 for (size_t i = 0; i < buffers.size(); ++i) { |
1505 EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_, | 1517 EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_, |
1506 egl_context_, | 1518 gl_context->GetHandle(), |
1507 buffers[i].texture_id(), | 1519 buffers[i].texture_id(), |
1508 buffers[i].size(), | 1520 buffers[i].size(), |
1509 i, | 1521 i, |
1510 output_format_fourcc, | 1522 output_format_fourcc, |
1511 output_planes_count); | 1523 output_planes_count); |
1512 if (egl_image == EGL_NO_IMAGE_KHR) { | 1524 if (egl_image == EGL_NO_IMAGE_KHR) { |
1513 LOGF(ERROR) << "Could not create EGLImageKHR"; | 1525 LOGF(ERROR) << "Could not create EGLImageKHR"; |
1514 for (const auto& image_to_destroy : egl_images) | 1526 for (const auto& image_to_destroy : egl_images) |
1515 device_->DestroyEGLImage(egl_display_, image_to_destroy); | 1527 device_->DestroyEGLImage(egl_display_, image_to_destroy); |
1516 | 1528 |
(...skipping 45 matching lines...) |
1562 } | 1574 } |
1563 | 1575 |
1564 ProcessPendingEventsIfNeeded(); | 1576 ProcessPendingEventsIfNeeded(); |
1565 } | 1577 } |
1566 | 1578 |
1567 void V4L2SliceVideoDecodeAccelerator::ReusePictureBuffer( | 1579 void V4L2SliceVideoDecodeAccelerator::ReusePictureBuffer( |
1568 int32_t picture_buffer_id) { | 1580 int32_t picture_buffer_id) { |
1569 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 1581 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
1570 DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id; | 1582 DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id; |
1571 | 1583 |
1572 if (!make_context_current_.Run()) { | 1584 if (!make_context_current_cb_.Run()) { |
1573 LOGF(ERROR) << "could not make context current"; | 1585 LOGF(ERROR) << "could not make context current"; |
1574 NOTIFY_ERROR(PLATFORM_FAILURE); | 1586 NOTIFY_ERROR(PLATFORM_FAILURE); |
1575 return; | 1587 return; |
1576 } | 1588 } |
1577 | 1589 |
1578 EGLSyncKHR egl_sync = | 1590 EGLSyncKHR egl_sync = |
1579 eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL); | 1591 eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL); |
1580 if (egl_sync == EGL_NO_SYNC_KHR) { | 1592 if (egl_sync == EGL_NO_SYNC_KHR) { |
1581 LOGF(ERROR) << "eglCreateSyncKHR() failed"; | 1593 LOGF(ERROR) << "eglCreateSyncKHR() failed"; |
1582 NOTIFY_ERROR(PLATFORM_FAILURE); | 1594 NOTIFY_ERROR(PLATFORM_FAILURE); |
(...skipping 55 matching lines...) |
1638 | 1650 |
1639 void V4L2SliceVideoDecodeAccelerator::FlushTask() { | 1651 void V4L2SliceVideoDecodeAccelerator::FlushTask() { |
1640 DVLOGF(3); | 1652 DVLOGF(3); |
1641 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1653 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
1642 | 1654 |
1643 if (!decoder_input_queue_.empty()) { | 1655 if (!decoder_input_queue_.empty()) { |
1644 // We are not done with pending inputs, so queue an empty buffer, | 1656 // We are not done with pending inputs, so queue an empty buffer, |
1645 // which - when reached - will trigger flush sequence. | 1657 // which - when reached - will trigger flush sequence. |
1646 decoder_input_queue_.push( | 1658 decoder_input_queue_.push( |
1647 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef( | 1659 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef( |
1648 io_client_, io_task_runner_, nullptr, 0, kFlushBufferId))); | 1660 decode_client_, decode_task_runner_, nullptr, 0, kFlushBufferId))); |
1649 return; | 1661 return; |
1650 } | 1662 } |
1651 | 1663 |
1652 // No more inputs pending, so just finish flushing here. | 1664 // No more inputs pending, so just finish flushing here. |
1653 InitiateFlush(); | 1665 InitiateFlush(); |
1654 } | 1666 } |
1655 | 1667 |
1656 void V4L2SliceVideoDecodeAccelerator::InitiateFlush() { | 1668 void V4L2SliceVideoDecodeAccelerator::InitiateFlush() { |
1657 DVLOGF(3); | 1669 DVLOGF(3); |
1658 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1670 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
(...skipping 890 matching lines...) |
2549 } | 2561 } |
2550 | 2562 |
2551 void V4L2SliceVideoDecodeAccelerator::SendPictureReady() { | 2563 void V4L2SliceVideoDecodeAccelerator::SendPictureReady() { |
2552 DVLOGF(3); | 2564 DVLOGF(3); |
2553 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2565 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2554 bool resetting_or_flushing = (decoder_resetting_ || decoder_flushing_); | 2566 bool resetting_or_flushing = (decoder_resetting_ || decoder_flushing_); |
2555 while (!pending_picture_ready_.empty()) { | 2567 while (!pending_picture_ready_.empty()) { |
2556 bool cleared = pending_picture_ready_.front().cleared; | 2568 bool cleared = pending_picture_ready_.front().cleared; |
2557 const media::Picture& picture = pending_picture_ready_.front().picture; | 2569 const media::Picture& picture = pending_picture_ready_.front().picture; |
2558 if (cleared && picture_clearing_count_ == 0) { | 2570 if (cleared && picture_clearing_count_ == 0) { |
2559 DVLOGF(4) << "Posting picture ready to IO for: " | 2571 DVLOGF(4) << "Posting picture ready to decode task runner for: " |
2560 << picture.picture_buffer_id(); | 2572 << picture.picture_buffer_id(); |
2561     // This picture is cleared. Post it to IO thread to reduce latency. This | 2573       // This picture is cleared. It can be posted to a thread different from |
2562 // should be the case after all pictures are cleared at the beginning. | 2574 // the main GPU thread to reduce latency. This should be the case after |
2563 io_task_runner_->PostTask( | 2575 // all pictures are cleared at the beginning. |
2564 FROM_HERE, base::Bind(&Client::PictureReady, io_client_, picture)); | 2576 decode_task_runner_->PostTask( |
| 2577 FROM_HERE, |
| 2578 base::Bind(&Client::PictureReady, decode_client_, picture)); |
2565 pending_picture_ready_.pop(); | 2579 pending_picture_ready_.pop(); |
2566 } else if (!cleared || resetting_or_flushing) { | 2580 } else if (!cleared || resetting_or_flushing) { |
2567 DVLOGF(3) << "cleared=" << pending_picture_ready_.front().cleared | 2581 DVLOGF(3) << "cleared=" << pending_picture_ready_.front().cleared |
2568 << ", decoder_resetting_=" << decoder_resetting_ | 2582 << ", decoder_resetting_=" << decoder_resetting_ |
2569 << ", decoder_flushing_=" << decoder_flushing_ | 2583 << ", decoder_flushing_=" << decoder_flushing_ |
2570 << ", picture_clearing_count_=" << picture_clearing_count_; | 2584 << ", picture_clearing_count_=" << picture_clearing_count_; |
2571 DVLOGF(4) << "Posting picture ready to GPU for: " | 2585 DVLOGF(4) << "Posting picture ready to GPU for: " |
2572 << picture.picture_buffer_id(); | 2586 << picture.picture_buffer_id(); |
2573 // If the picture is not cleared, post it to the child thread because it | 2587 // If the picture is not cleared, post it to the child thread because it |
2574 // has to be cleared in the child thread. A picture only needs to be | 2588 // has to be cleared in the child thread. A picture only needs to be |
(...skipping 17 matching lines...) |
2592 } | 2606 } |
2593 | 2607 |
2594 void V4L2SliceVideoDecodeAccelerator::PictureCleared() { | 2608 void V4L2SliceVideoDecodeAccelerator::PictureCleared() { |
2595 DVLOGF(3) << "clearing count=" << picture_clearing_count_; | 2609 DVLOGF(3) << "clearing count=" << picture_clearing_count_; |
2596 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2610 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2597 DCHECK_GT(picture_clearing_count_, 0); | 2611 DCHECK_GT(picture_clearing_count_, 0); |
2598 picture_clearing_count_--; | 2612 picture_clearing_count_--; |
2599 SendPictureReady(); | 2613 SendPictureReady(); |
2600 } | 2614 } |
2601 | 2615 |
2602 bool V4L2SliceVideoDecodeAccelerator::CanDecodeOnIOThread() { | 2616 bool V4L2SliceVideoDecodeAccelerator::TryInitializeDecodeOnSeparateThread( |
| 2617 const base::WeakPtr<Client>& decode_client, |
| 2618 const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) { |
| 2619   decode_client_ = decode_client; |
| 2620 decode_task_runner_ = decode_task_runner; |
2603 return true; | 2621 return true; |
2604 } | 2622 } |
2605 | 2623 |
2606 // static | 2624 // static |
2607 media::VideoDecodeAccelerator::SupportedProfiles | 2625 media::VideoDecodeAccelerator::SupportedProfiles |
2608 V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles() { | 2626 V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles() { |
2609 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder); | 2627 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder); |
2610 if (!device) | 2628 if (!device) |
2611 return SupportedProfiles(); | 2629 return SupportedProfiles(); |
2612 | 2630 |
2613 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_), | 2631 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_), |
2614 supported_input_fourccs_); | 2632 supported_input_fourccs_); |
2615 } | 2633 } |
2616 | 2634 |
2617 } // namespace content | 2635 } // namespace content |