OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <errno.h> | 5 #include <errno.h> |
6 #include <fcntl.h> | 6 #include <fcntl.h> |
7 #include <linux/videodev2.h> | 7 #include <linux/videodev2.h> |
8 #include <poll.h> | 8 #include <poll.h> |
9 #include <string.h> | 9 #include <string.h> |
10 #include <sys/eventfd.h> | 10 #include <sys/eventfd.h> |
11 #include <sys/ioctl.h> | 11 #include <sys/ioctl.h> |
12 #include <sys/mman.h> | 12 #include <sys/mman.h> |
13 | 13 |
14 #include "base/bind.h" | 14 #include "base/bind.h" |
15 #include "base/bind_helpers.h" | 15 #include "base/bind_helpers.h" |
16 #include "base/callback.h" | 16 #include "base/callback.h" |
17 #include "base/callback_helpers.h" | 17 #include "base/callback_helpers.h" |
18 #include "base/command_line.h" | 18 #include "base/command_line.h" |
19 #include "base/macros.h" | 19 #include "base/macros.h" |
20 #include "base/numerics/safe_conversions.h" | 20 #include "base/numerics/safe_conversions.h" |
21 #include "base/strings/stringprintf.h" | 21 #include "base/strings/stringprintf.h" |
22 #include "content/common/gpu/media/shared_memory_region.h" | 22 #include "content/common/gpu/media/shared_memory_region.h" |
23 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h" | 23 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h" |
24 #include "media/base/bind_to_current_loop.h" | 24 #include "media/base/bind_to_current_loop.h" |
25 #include "media/base/media_switches.h" | 25 #include "media/base/media_switches.h" |
| 26 #include "ui/gl/gl_context.h" |
26 #include "ui/gl/scoped_binders.h" | 27 #include "ui/gl/scoped_binders.h" |
27 | 28 |
28 #define LOGF(level) LOG(level) << __FUNCTION__ << "(): " | 29 #define LOGF(level) LOG(level) << __FUNCTION__ << "(): " |
29 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): " | 30 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): " |
30 | 31 |
31 #define NOTIFY_ERROR(x) \ | 32 #define NOTIFY_ERROR(x) \ |
32 do { \ | 33 do { \ |
33 LOG(ERROR) << "Setting error state:" << x; \ | 34 LOG(ERROR) << "Setting error state:" << x; \ |
34 SetErrorState(x); \ | 35 SetErrorState(x); \ |
35 } while (0) | 36 } while (0) |
(...skipping 336 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
372 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface) | 373 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface) |
373 : dec_surface_(dec_surface) { | 374 : dec_surface_(dec_surface) { |
374 } | 375 } |
375 | 376 |
376 V4L2VP8Picture::~V4L2VP8Picture() { | 377 V4L2VP8Picture::~V4L2VP8Picture() { |
377 } | 378 } |
378 | 379 |
379 V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator( | 380 V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator( |
380 const scoped_refptr<V4L2Device>& device, | 381 const scoped_refptr<V4L2Device>& device, |
381 EGLDisplay egl_display, | 382 EGLDisplay egl_display, |
382 EGLContext egl_context, | 383 const GetGLContextCallback& get_gl_context_cb, |
383 const base::WeakPtr<Client>& io_client, | 384 const MakeGLContextCurrentCallback& make_context_current_cb) |
384 const base::Callback<bool(void)>& make_context_current, | |
385 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner) | |
386 : input_planes_count_(0), | 385 : input_planes_count_(0), |
387 output_planes_count_(0), | 386 output_planes_count_(0), |
388 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), | 387 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), |
389 io_task_runner_(io_task_runner), | |
390 io_client_(io_client), | |
391 device_(device), | 388 device_(device), |
392 decoder_thread_("V4L2SliceVideoDecodeAcceleratorThread"), | 389 decoder_thread_("V4L2SliceVideoDecodeAcceleratorThread"), |
393 device_poll_thread_("V4L2SliceVideoDecodeAcceleratorDevicePollThread"), | 390 device_poll_thread_("V4L2SliceVideoDecodeAcceleratorDevicePollThread"), |
394 input_streamon_(false), | 391 input_streamon_(false), |
395 input_buffer_queued_count_(0), | 392 input_buffer_queued_count_(0), |
396 output_streamon_(false), | 393 output_streamon_(false), |
397 output_buffer_queued_count_(0), | 394 output_buffer_queued_count_(0), |
398 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN), | 395 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN), |
399 output_format_fourcc_(0), | 396 output_format_fourcc_(0), |
400 state_(kUninitialized), | 397 state_(kUninitialized), |
401 decoder_flushing_(false), | 398 decoder_flushing_(false), |
402 decoder_resetting_(false), | 399 decoder_resetting_(false), |
403 surface_set_change_pending_(false), | 400 surface_set_change_pending_(false), |
404 picture_clearing_count_(0), | 401 picture_clearing_count_(0), |
405 make_context_current_(make_context_current), | |
406 egl_display_(egl_display), | 402 egl_display_(egl_display), |
407 egl_context_(egl_context), | 403 get_gl_context_cb_(get_gl_context_cb), |
| 404 make_context_current_cb_(make_context_current_cb), |
408 weak_this_factory_(this) { | 405 weak_this_factory_(this) { |
409 weak_this_ = weak_this_factory_.GetWeakPtr(); | 406 weak_this_ = weak_this_factory_.GetWeakPtr(); |
410 } | 407 } |
411 | 408 |
412 V4L2SliceVideoDecodeAccelerator::~V4L2SliceVideoDecodeAccelerator() { | 409 V4L2SliceVideoDecodeAccelerator::~V4L2SliceVideoDecodeAccelerator() { |
413 DVLOGF(2); | 410 DVLOGF(2); |
414 | 411 |
415 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 412 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
416 DCHECK(!decoder_thread_.IsRunning()); | 413 DCHECK(!decoder_thread_.IsRunning()); |
417 DCHECK(!device_poll_thread_.IsRunning()); | 414 DCHECK(!device_poll_thread_.IsRunning()); |
(...skipping 15 matching lines...) Expand all Loading... |
433 client_ptr_factory_.reset(); | 430 client_ptr_factory_.reset(); |
434 } | 431 } |
435 } | 432 } |
436 | 433 |
437 bool V4L2SliceVideoDecodeAccelerator::Initialize(const Config& config, | 434 bool V4L2SliceVideoDecodeAccelerator::Initialize(const Config& config, |
438 Client* client) { | 435 Client* client) { |
439 DVLOGF(3) << "profile: " << config.profile; | 436 DVLOGF(3) << "profile: " << config.profile; |
440 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 437 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
441 DCHECK_EQ(state_, kUninitialized); | 438 DCHECK_EQ(state_, kUninitialized); |
442 | 439 |
| 440 if (get_gl_context_cb_.is_null() || make_context_current_cb_.is_null()) { |
| 441 NOTREACHED() << "GL callbacks are required for this VDA"; |
| 442 return false; |
| 443 } |
| 444 |
443 if (config.is_encrypted) { | 445 if (config.is_encrypted) { |
444 NOTREACHED() << "Encrypted streams are not supported for this VDA"; | 446 NOTREACHED() << "Encrypted streams are not supported for this VDA"; |
445 return false; | 447 return false; |
446 } | 448 } |
447 | 449 |
448 if (!device_->SupportsDecodeProfileForV4L2PixelFormats( | 450 if (!device_->SupportsDecodeProfileForV4L2PixelFormats( |
449 config.profile, arraysize(supported_input_fourccs_), | 451 config.profile, arraysize(supported_input_fourccs_), |
450 supported_input_fourccs_)) { | 452 supported_input_fourccs_)) { |
451 DVLOGF(1) << "unsupported profile " << config.profile; | 453 DVLOGF(1) << "unsupported profile " << config.profile; |
452 return false; | 454 return false; |
453 } | 455 } |
454 | 456 |
455 client_ptr_factory_.reset( | 457 client_ptr_factory_.reset( |
456 new base::WeakPtrFactory<VideoDecodeAccelerator::Client>(client)); | 458 new base::WeakPtrFactory<VideoDecodeAccelerator::Client>(client)); |
457 client_ = client_ptr_factory_->GetWeakPtr(); | 459 client_ = client_ptr_factory_->GetWeakPtr(); |
| 460 // If we haven't been set up to decode on separate thread via |
| 461 // TryToSetupDecodeOnSeparateThread(), use the main thread/client for |
| 462 // decode tasks. |
| 463 if (!decode_task_runner_) { |
| 464 decode_task_runner_ = child_task_runner_; |
| 465 DCHECK(!decode_client_); |
| 466 decode_client_ = client_; |
| 467 } |
458 | 468 |
459 video_profile_ = config.profile; | 469 video_profile_ = config.profile; |
460 | 470 |
461 if (video_profile_ >= media::H264PROFILE_MIN && | 471 if (video_profile_ >= media::H264PROFILE_MIN && |
462 video_profile_ <= media::H264PROFILE_MAX) { | 472 video_profile_ <= media::H264PROFILE_MAX) { |
463 h264_accelerator_.reset(new V4L2H264Accelerator(this)); | 473 h264_accelerator_.reset(new V4L2H264Accelerator(this)); |
464 decoder_.reset(new H264Decoder(h264_accelerator_.get())); | 474 decoder_.reset(new H264Decoder(h264_accelerator_.get())); |
465 } else if (video_profile_ >= media::VP8PROFILE_MIN && | 475 } else if (video_profile_ >= media::VP8PROFILE_MIN && |
466 video_profile_ <= media::VP8PROFILE_MAX) { | 476 video_profile_ <= media::VP8PROFILE_MAX) { |
467 vp8_accelerator_.reset(new V4L2VP8Accelerator(this)); | 477 vp8_accelerator_.reset(new V4L2VP8Accelerator(this)); |
468 decoder_.reset(new VP8Decoder(vp8_accelerator_.get())); | 478 decoder_.reset(new VP8Decoder(vp8_accelerator_.get())); |
469 } else { | 479 } else { |
470 NOTREACHED() << "Unsupported profile " << video_profile_; | 480 NOTREACHED() << "Unsupported profile " << video_profile_; |
471 return false; | 481 return false; |
472 } | 482 } |
473 | 483 |
474 // TODO(posciak): This needs to be queried once supported. | 484 // TODO(posciak): This needs to be queried once supported. |
475 input_planes_count_ = 1; | 485 input_planes_count_ = 1; |
476 output_planes_count_ = 1; | 486 output_planes_count_ = 1; |
477 | 487 |
478 if (egl_display_ == EGL_NO_DISPLAY) { | 488 if (egl_display_ == EGL_NO_DISPLAY) { |
479 LOG(ERROR) << "Initialize(): could not get EGLDisplay"; | 489 LOG(ERROR) << "Initialize(): could not get EGLDisplay"; |
480 return false; | 490 return false; |
481 } | 491 } |
482 | 492 |
483 // We need the context to be initialized to query extensions. | 493 // We need the context to be initialized to query extensions. |
484 if (!make_context_current_.Run()) { | 494 if (!make_context_current_cb_.Run()) { |
485 LOG(ERROR) << "Initialize(): could not make context current"; | 495 LOG(ERROR) << "Initialize(): could not make context current"; |
486 return false; | 496 return false; |
487 } | 497 } |
488 | 498 |
489 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) { | 499 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) { |
490 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync"; | 500 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync"; |
491 return false; | 501 return false; |
492 } | 502 } |
493 | 503 |
494 // Capabilities check. | 504 // Capabilities check. |
(...skipping 683 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1178 decoder_display_queue_.pop(); | 1188 decoder_display_queue_.pop(); |
1179 | 1189 |
1180 DVLOGF(3) << "Device poll stopped"; | 1190 DVLOGF(3) << "Device poll stopped"; |
1181 return true; | 1191 return true; |
1182 } | 1192 } |
1183 | 1193 |
1184 void V4L2SliceVideoDecodeAccelerator::Decode( | 1194 void V4L2SliceVideoDecodeAccelerator::Decode( |
1185 const media::BitstreamBuffer& bitstream_buffer) { | 1195 const media::BitstreamBuffer& bitstream_buffer) { |
1186 DVLOGF(3) << "input_id=" << bitstream_buffer.id() | 1196 DVLOGF(3) << "input_id=" << bitstream_buffer.id() |
1187 << ", size=" << bitstream_buffer.size(); | 1197 << ", size=" << bitstream_buffer.size(); |
1188 DCHECK(io_task_runner_->BelongsToCurrentThread()); | 1198 DCHECK(decode_task_runner_->BelongsToCurrentThread()); |
1189 | 1199 |
1190 if (bitstream_buffer.id() < 0) { | 1200 if (bitstream_buffer.id() < 0) { |
1191 LOG(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id(); | 1201 LOG(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id(); |
1192 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) | 1202 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) |
1193 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); | 1203 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); |
1194 NOTIFY_ERROR(INVALID_ARGUMENT); | 1204 NOTIFY_ERROR(INVALID_ARGUMENT); |
1195 return; | 1205 return; |
1196 } | 1206 } |
1197 | 1207 |
1198 decoder_thread_task_runner_->PostTask( | 1208 decoder_thread_task_runner_->PostTask( |
1199 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeTask, | 1209 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeTask, |
1200 base::Unretained(this), bitstream_buffer)); | 1210 base::Unretained(this), bitstream_buffer)); |
1201 } | 1211 } |
1202 | 1212 |
1203 void V4L2SliceVideoDecodeAccelerator::DecodeTask( | 1213 void V4L2SliceVideoDecodeAccelerator::DecodeTask( |
1204 const media::BitstreamBuffer& bitstream_buffer) { | 1214 const media::BitstreamBuffer& bitstream_buffer) { |
1205 DVLOGF(3) << "input_id=" << bitstream_buffer.id() | 1215 DVLOGF(3) << "input_id=" << bitstream_buffer.id() |
1206 << " size=" << bitstream_buffer.size(); | 1216 << " size=" << bitstream_buffer.size(); |
1207 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1217 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
1208 | 1218 |
1209 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef( | 1219 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef( |
1210 io_client_, io_task_runner_, | 1220 decode_client_, decode_task_runner_, |
1211 new SharedMemoryRegion(bitstream_buffer, true), bitstream_buffer.id())); | 1221 new SharedMemoryRegion(bitstream_buffer, true), bitstream_buffer.id())); |
1212 if (!bitstream_record->shm->Map()) { | 1222 if (!bitstream_record->shm->Map()) { |
1213 LOGF(ERROR) << "Could not map bitstream_buffer"; | 1223 LOGF(ERROR) << "Could not map bitstream_buffer"; |
1214 NOTIFY_ERROR(UNREADABLE_INPUT); | 1224 NOTIFY_ERROR(UNREADABLE_INPUT); |
1215 return; | 1225 return; |
1216 } | 1226 } |
1217 DVLOGF(3) << "mapped at=" << bitstream_record->shm->memory(); | 1227 DVLOGF(3) << "mapped at=" << bitstream_record->shm->memory(); |
1218 | 1228 |
1219 decoder_input_queue_.push( | 1229 decoder_input_queue_.push( |
1220 linked_ptr<BitstreamBufferRef>(bitstream_record.release())); | 1230 linked_ptr<BitstreamBufferRef>(bitstream_record.release())); |
(...skipping 260 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1481 output_planes_count_)); | 1491 output_planes_count_)); |
1482 } | 1492 } |
1483 | 1493 |
1484 void V4L2SliceVideoDecodeAccelerator::CreateEGLImages( | 1494 void V4L2SliceVideoDecodeAccelerator::CreateEGLImages( |
1485 const std::vector<media::PictureBuffer>& buffers, | 1495 const std::vector<media::PictureBuffer>& buffers, |
1486 uint32_t output_format_fourcc, | 1496 uint32_t output_format_fourcc, |
1487 size_t output_planes_count) { | 1497 size_t output_planes_count) { |
1488 DVLOGF(3); | 1498 DVLOGF(3); |
1489 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 1499 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
1490 | 1500 |
1491 if (!make_context_current_.Run()) { | 1501 gfx::GLContext* gl_context = get_gl_context_cb_.Run(); |
1492 DLOG(ERROR) << "could not make context current"; | 1502 if (!gl_context || !make_context_current_cb_.Run()) { |
| 1503 DLOG(ERROR) << "No GL context"; |
1493 NOTIFY_ERROR(PLATFORM_FAILURE); | 1504 NOTIFY_ERROR(PLATFORM_FAILURE); |
1494 return; | 1505 return; |
1495 } | 1506 } |
1496 | 1507 |
1497 gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0); | 1508 gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0); |
1498 | 1509 |
1499 std::vector<EGLImageKHR> egl_images; | 1510 std::vector<EGLImageKHR> egl_images; |
1500 for (size_t i = 0; i < buffers.size(); ++i) { | 1511 for (size_t i = 0; i < buffers.size(); ++i) { |
1501 DCHECK_LE(1u, buffers[i].texture_ids().size()); | 1512 DCHECK_LE(1u, buffers[i].texture_ids().size()); |
| 1513 |
1502 EGLImageKHR egl_image = device_->CreateEGLImage( | 1514 EGLImageKHR egl_image = device_->CreateEGLImage( |
1503 egl_display_, egl_context_, buffers[i].texture_ids()[0], | 1515 egl_display_, gl_context->GetHandle(), buffers[i].texture_ids()[0], |
1504 buffers[i].size(), i, output_format_fourcc, output_planes_count); | 1516 buffers[i].size(), i, output_format_fourcc, output_planes_count); |
1505 if (egl_image == EGL_NO_IMAGE_KHR) { | 1517 if (egl_image == EGL_NO_IMAGE_KHR) { |
1506 LOGF(ERROR) << "Could not create EGLImageKHR"; | 1518 LOGF(ERROR) << "Could not create EGLImageKHR"; |
1507 for (const auto& image_to_destroy : egl_images) | 1519 for (const auto& image_to_destroy : egl_images) |
1508 device_->DestroyEGLImage(egl_display_, image_to_destroy); | 1520 device_->DestroyEGLImage(egl_display_, image_to_destroy); |
1509 | 1521 |
1510 NOTIFY_ERROR(PLATFORM_FAILURE); | 1522 NOTIFY_ERROR(PLATFORM_FAILURE); |
1511 return; | 1523 return; |
1512 } | 1524 } |
1513 | 1525 |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1555 } | 1567 } |
1556 | 1568 |
1557 ProcessPendingEventsIfNeeded(); | 1569 ProcessPendingEventsIfNeeded(); |
1558 } | 1570 } |
1559 | 1571 |
1560 void V4L2SliceVideoDecodeAccelerator::ReusePictureBuffer( | 1572 void V4L2SliceVideoDecodeAccelerator::ReusePictureBuffer( |
1561 int32_t picture_buffer_id) { | 1573 int32_t picture_buffer_id) { |
1562 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 1574 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
1563 DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id; | 1575 DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id; |
1564 | 1576 |
1565 if (!make_context_current_.Run()) { | 1577 if (!make_context_current_cb_.Run()) { |
1566 LOGF(ERROR) << "could not make context current"; | 1578 LOGF(ERROR) << "could not make context current"; |
1567 NOTIFY_ERROR(PLATFORM_FAILURE); | 1579 NOTIFY_ERROR(PLATFORM_FAILURE); |
1568 return; | 1580 return; |
1569 } | 1581 } |
1570 | 1582 |
1571 EGLSyncKHR egl_sync = | 1583 EGLSyncKHR egl_sync = |
1572 eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL); | 1584 eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL); |
1573 if (egl_sync == EGL_NO_SYNC_KHR) { | 1585 if (egl_sync == EGL_NO_SYNC_KHR) { |
1574 LOGF(ERROR) << "eglCreateSyncKHR() failed"; | 1586 LOGF(ERROR) << "eglCreateSyncKHR() failed"; |
1575 NOTIFY_ERROR(PLATFORM_FAILURE); | 1587 NOTIFY_ERROR(PLATFORM_FAILURE); |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1631 | 1643 |
1632 void V4L2SliceVideoDecodeAccelerator::FlushTask() { | 1644 void V4L2SliceVideoDecodeAccelerator::FlushTask() { |
1633 DVLOGF(3); | 1645 DVLOGF(3); |
1634 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1646 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
1635 | 1647 |
1636 if (!decoder_input_queue_.empty()) { | 1648 if (!decoder_input_queue_.empty()) { |
1637 // We are not done with pending inputs, so queue an empty buffer, | 1649 // We are not done with pending inputs, so queue an empty buffer, |
1638 // which - when reached - will trigger flush sequence. | 1650 // which - when reached - will trigger flush sequence. |
1639 decoder_input_queue_.push( | 1651 decoder_input_queue_.push( |
1640 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef( | 1652 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef( |
1641 io_client_, io_task_runner_, nullptr, kFlushBufferId))); | 1653 decode_client_, decode_task_runner_, nullptr, kFlushBufferId))); |
1642 return; | 1654 return; |
1643 } | 1655 } |
1644 | 1656 |
1645 // No more inputs pending, so just finish flushing here. | 1657 // No more inputs pending, so just finish flushing here. |
1646 InitiateFlush(); | 1658 InitiateFlush(); |
1647 } | 1659 } |
1648 | 1660 |
1649 void V4L2SliceVideoDecodeAccelerator::InitiateFlush() { | 1661 void V4L2SliceVideoDecodeAccelerator::InitiateFlush() { |
1650 DVLOGF(3); | 1662 DVLOGF(3); |
1651 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1663 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
(...skipping 890 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2542 } | 2554 } |
2543 | 2555 |
2544 void V4L2SliceVideoDecodeAccelerator::SendPictureReady() { | 2556 void V4L2SliceVideoDecodeAccelerator::SendPictureReady() { |
2545 DVLOGF(3); | 2557 DVLOGF(3); |
2546 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2558 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2547 bool resetting_or_flushing = (decoder_resetting_ || decoder_flushing_); | 2559 bool resetting_or_flushing = (decoder_resetting_ || decoder_flushing_); |
2548 while (!pending_picture_ready_.empty()) { | 2560 while (!pending_picture_ready_.empty()) { |
2549 bool cleared = pending_picture_ready_.front().cleared; | 2561 bool cleared = pending_picture_ready_.front().cleared; |
2550 const media::Picture& picture = pending_picture_ready_.front().picture; | 2562 const media::Picture& picture = pending_picture_ready_.front().picture; |
2551 if (cleared && picture_clearing_count_ == 0) { | 2563 if (cleared && picture_clearing_count_ == 0) { |
2552 DVLOGF(4) << "Posting picture ready to IO for: " | 2564 DVLOGF(4) << "Posting picture ready to decode task runner for: " |
2553 << picture.picture_buffer_id(); | 2565 << picture.picture_buffer_id(); |
2554 // This picture is cleared. Post it to IO thread to reduce latency. This | 2566 // This picture is cleared. It can be posted to a thread different than |
2555 // should be the case after all pictures are cleared at the beginning. | 2567 // the main GPU thread to reduce latency. This should be the case after |
2556 io_task_runner_->PostTask( | 2568 // all pictures are cleared at the beginning. |
2557 FROM_HERE, base::Bind(&Client::PictureReady, io_client_, picture)); | 2569 decode_task_runner_->PostTask( |
| 2570 FROM_HERE, |
| 2571 base::Bind(&Client::PictureReady, decode_client_, picture)); |
2558 pending_picture_ready_.pop(); | 2572 pending_picture_ready_.pop(); |
2559 } else if (!cleared || resetting_or_flushing) { | 2573 } else if (!cleared || resetting_or_flushing) { |
2560 DVLOGF(3) << "cleared=" << pending_picture_ready_.front().cleared | 2574 DVLOGF(3) << "cleared=" << pending_picture_ready_.front().cleared |
2561 << ", decoder_resetting_=" << decoder_resetting_ | 2575 << ", decoder_resetting_=" << decoder_resetting_ |
2562 << ", decoder_flushing_=" << decoder_flushing_ | 2576 << ", decoder_flushing_=" << decoder_flushing_ |
2563 << ", picture_clearing_count_=" << picture_clearing_count_; | 2577 << ", picture_clearing_count_=" << picture_clearing_count_; |
2564 DVLOGF(4) << "Posting picture ready to GPU for: " | 2578 DVLOGF(4) << "Posting picture ready to GPU for: " |
2565 << picture.picture_buffer_id(); | 2579 << picture.picture_buffer_id(); |
2566 // If the picture is not cleared, post it to the child thread because it | 2580 // If the picture is not cleared, post it to the child thread because it |
2567 // has to be cleared in the child thread. A picture only needs to be | 2581 // has to be cleared in the child thread. A picture only needs to be |
(...skipping 17 matching lines...) Expand all Loading... |
2585 } | 2599 } |
2586 | 2600 |
2587 void V4L2SliceVideoDecodeAccelerator::PictureCleared() { | 2601 void V4L2SliceVideoDecodeAccelerator::PictureCleared() { |
2588 DVLOGF(3) << "clearing count=" << picture_clearing_count_; | 2602 DVLOGF(3) << "clearing count=" << picture_clearing_count_; |
2589 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2603 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2590 DCHECK_GT(picture_clearing_count_, 0); | 2604 DCHECK_GT(picture_clearing_count_, 0); |
2591 picture_clearing_count_--; | 2605 picture_clearing_count_--; |
2592 SendPictureReady(); | 2606 SendPictureReady(); |
2593 } | 2607 } |
2594 | 2608 |
2595 bool V4L2SliceVideoDecodeAccelerator::CanDecodeOnIOThread() { | 2609 bool V4L2SliceVideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread( |
| 2610 const base::WeakPtr<Client>& decode_client, |
| 2611 const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) { |
| 2612 decode_client_ = decode_client; |
| 2613 decode_task_runner_ = decode_task_runner; |
2596 return true; | 2614 return true; |
2597 } | 2615 } |
2598 | 2616 |
2599 // static | 2617 // static |
2600 media::VideoDecodeAccelerator::SupportedProfiles | 2618 media::VideoDecodeAccelerator::SupportedProfiles |
2601 V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles() { | 2619 V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles() { |
2602 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder); | 2620 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder); |
2603 if (!device) | 2621 if (!device) |
2604 return SupportedProfiles(); | 2622 return SupportedProfiles(); |
2605 | 2623 |
2606 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_), | 2624 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_), |
2607 supported_input_fourccs_); | 2625 supported_input_fourccs_); |
2608 } | 2626 } |
2609 | 2627 |
2610 } // namespace content | 2628 } // namespace content |
OLD | NEW |