| OLD | NEW |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <errno.h> | 5 #include <errno.h> |
| 6 #include <fcntl.h> | 6 #include <fcntl.h> |
| 7 #include <linux/videodev2.h> | 7 #include <linux/videodev2.h> |
| 8 #include <poll.h> | 8 #include <poll.h> |
| 9 #include <string.h> | 9 #include <string.h> |
| 10 #include <sys/eventfd.h> | 10 #include <sys/eventfd.h> |
| 11 #include <sys/ioctl.h> | 11 #include <sys/ioctl.h> |
| 12 #include <sys/mman.h> | 12 #include <sys/mman.h> |
| 13 | 13 |
| 14 #include "base/bind.h" | 14 #include "base/bind.h" |
| 15 #include "base/bind_helpers.h" | 15 #include "base/bind_helpers.h" |
| 16 #include "base/callback.h" | 16 #include "base/callback.h" |
| 17 #include "base/callback_helpers.h" | 17 #include "base/callback_helpers.h" |
| 18 #include "base/command_line.h" | 18 #include "base/command_line.h" |
| 19 #include "base/macros.h" | 19 #include "base/macros.h" |
| 20 #include "base/numerics/safe_conversions.h" | 20 #include "base/numerics/safe_conversions.h" |
| 21 #include "base/strings/stringprintf.h" | 21 #include "base/strings/stringprintf.h" |
| 22 #include "content/common/gpu/media/shared_memory_region.h" | 22 #include "content/common/gpu/media/shared_memory_region.h" |
| 23 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h" | 23 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h" |
| 24 #include "media/base/bind_to_current_loop.h" | 24 #include "media/base/bind_to_current_loop.h" |
| 25 #include "media/base/media_switches.h" | 25 #include "media/base/media_switches.h" |
| 26 #include "ui/gl/gl_context.h" |
| 26 #include "ui/gl/scoped_binders.h" | 27 #include "ui/gl/scoped_binders.h" |
| 27 | 28 |
| 28 #define LOGF(level) LOG(level) << __FUNCTION__ << "(): " | 29 #define LOGF(level) LOG(level) << __FUNCTION__ << "(): " |
| 29 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): " | 30 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): " |
| 30 | 31 |
| 31 #define NOTIFY_ERROR(x) \ | 32 #define NOTIFY_ERROR(x) \ |
| 32 do { \ | 33 do { \ |
| 33 LOG(ERROR) << "Setting error state:" << x; \ | 34 LOG(ERROR) << "Setting error state:" << x; \ |
| 34 SetErrorState(x); \ | 35 SetErrorState(x); \ |
| 35 } while (0) | 36 } while (0) |
| (...skipping 336 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 372 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface) | 373 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface) |
| 373 : dec_surface_(dec_surface) { | 374 : dec_surface_(dec_surface) { |
| 374 } | 375 } |
| 375 | 376 |
| 376 V4L2VP8Picture::~V4L2VP8Picture() { | 377 V4L2VP8Picture::~V4L2VP8Picture() { |
| 377 } | 378 } |
| 378 | 379 |
| 379 V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator( | 380 V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator( |
| 380 const scoped_refptr<V4L2Device>& device, | 381 const scoped_refptr<V4L2Device>& device, |
| 381 EGLDisplay egl_display, | 382 EGLDisplay egl_display, |
| 382 EGLContext egl_context, | 383 const GetGLContextCallback& get_gl_context_cb, |
| 383 const base::WeakPtr<Client>& io_client, | 384 const MakeGLContextCurrentCallback& make_context_current_cb) |
| 384 const base::Callback<bool(void)>& make_context_current, | |
| 385 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner) | |
| 386 : input_planes_count_(0), | 385 : input_planes_count_(0), |
| 387 output_planes_count_(0), | 386 output_planes_count_(0), |
| 388 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), | 387 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), |
| 389 io_task_runner_(io_task_runner), | |
| 390 io_client_(io_client), | |
| 391 device_(device), | 388 device_(device), |
| 392 decoder_thread_("V4L2SliceVideoDecodeAcceleratorThread"), | 389 decoder_thread_("V4L2SliceVideoDecodeAcceleratorThread"), |
| 393 device_poll_thread_("V4L2SliceVideoDecodeAcceleratorDevicePollThread"), | 390 device_poll_thread_("V4L2SliceVideoDecodeAcceleratorDevicePollThread"), |
| 394 input_streamon_(false), | 391 input_streamon_(false), |
| 395 input_buffer_queued_count_(0), | 392 input_buffer_queued_count_(0), |
| 396 output_streamon_(false), | 393 output_streamon_(false), |
| 397 output_buffer_queued_count_(0), | 394 output_buffer_queued_count_(0), |
| 398 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN), | 395 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN), |
| 399 output_format_fourcc_(0), | 396 output_format_fourcc_(0), |
| 400 state_(kUninitialized), | 397 state_(kUninitialized), |
| 401 decoder_flushing_(false), | 398 decoder_flushing_(false), |
| 402 decoder_resetting_(false), | 399 decoder_resetting_(false), |
| 403 surface_set_change_pending_(false), | 400 surface_set_change_pending_(false), |
| 404 picture_clearing_count_(0), | 401 picture_clearing_count_(0), |
| 405 make_context_current_(make_context_current), | |
| 406 egl_display_(egl_display), | 402 egl_display_(egl_display), |
| 407 egl_context_(egl_context), | 403 get_gl_context_cb_(get_gl_context_cb), |
| 404 make_context_current_cb_(make_context_current_cb), |
| 408 weak_this_factory_(this) { | 405 weak_this_factory_(this) { |
| 409 weak_this_ = weak_this_factory_.GetWeakPtr(); | 406 weak_this_ = weak_this_factory_.GetWeakPtr(); |
| 410 } | 407 } |
| 411 | 408 |
| 412 V4L2SliceVideoDecodeAccelerator::~V4L2SliceVideoDecodeAccelerator() { | 409 V4L2SliceVideoDecodeAccelerator::~V4L2SliceVideoDecodeAccelerator() { |
| 413 DVLOGF(2); | 410 DVLOGF(2); |
| 414 | 411 |
| 415 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 412 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
| 416 DCHECK(!decoder_thread_.IsRunning()); | 413 DCHECK(!decoder_thread_.IsRunning()); |
| 417 DCHECK(!device_poll_thread_.IsRunning()); | 414 DCHECK(!device_poll_thread_.IsRunning()); |
| (...skipping 15 matching lines...) Expand all Loading... |
| 433 client_ptr_factory_.reset(); | 430 client_ptr_factory_.reset(); |
| 434 } | 431 } |
| 435 } | 432 } |
| 436 | 433 |
| 437 bool V4L2SliceVideoDecodeAccelerator::Initialize(const Config& config, | 434 bool V4L2SliceVideoDecodeAccelerator::Initialize(const Config& config, |
| 438 Client* client) { | 435 Client* client) { |
| 439 DVLOGF(3) << "profile: " << config.profile; | 436 DVLOGF(3) << "profile: " << config.profile; |
| 440 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 437 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
| 441 DCHECK_EQ(state_, kUninitialized); | 438 DCHECK_EQ(state_, kUninitialized); |
| 442 | 439 |
| 440 if (get_gl_context_cb_.is_null() || make_context_current_cb_.is_null()) { |
| 441 NOTREACHED() << "GL callbacks are required for this VDA"; |
| 442 return false; |
| 443 } |
| 444 |
| 443 if (config.is_encrypted) { | 445 if (config.is_encrypted) { |
| 444 NOTREACHED() << "Encrypted streams are not supported for this VDA"; | 446 NOTREACHED() << "Encrypted streams are not supported for this VDA"; |
| 445 return false; | 447 return false; |
| 446 } | 448 } |
| 447 | 449 |
| 448 if (!device_->SupportsDecodeProfileForV4L2PixelFormats( | 450 if (!device_->SupportsDecodeProfileForV4L2PixelFormats( |
| 449 config.profile, arraysize(supported_input_fourccs_), | 451 config.profile, arraysize(supported_input_fourccs_), |
| 450 supported_input_fourccs_)) { | 452 supported_input_fourccs_)) { |
| 451 DVLOGF(1) << "unsupported profile " << config.profile; | 453 DVLOGF(1) << "unsupported profile " << config.profile; |
| 452 return false; | 454 return false; |
| 453 } | 455 } |
| 454 | 456 |
| 455 client_ptr_factory_.reset( | 457 client_ptr_factory_.reset( |
| 456 new base::WeakPtrFactory<VideoDecodeAccelerator::Client>(client)); | 458 new base::WeakPtrFactory<VideoDecodeAccelerator::Client>(client)); |
| 457 client_ = client_ptr_factory_->GetWeakPtr(); | 459 client_ = client_ptr_factory_->GetWeakPtr(); |
| 460 // If we haven't been set up to decode on separate thread via |
| 461 // TryToSetupDecodeOnSeparateThread(), use the main thread/client for |
| 462 // decode tasks. |
| 463 if (!decode_task_runner_) { |
| 464 decode_task_runner_ = child_task_runner_; |
| 465 DCHECK(!decode_client_); |
| 466 decode_client_ = client_; |
| 467 } |
| 458 | 468 |
| 459 video_profile_ = config.profile; | 469 video_profile_ = config.profile; |
| 460 | 470 |
| 461 if (video_profile_ >= media::H264PROFILE_MIN && | 471 if (video_profile_ >= media::H264PROFILE_MIN && |
| 462 video_profile_ <= media::H264PROFILE_MAX) { | 472 video_profile_ <= media::H264PROFILE_MAX) { |
| 463 h264_accelerator_.reset(new V4L2H264Accelerator(this)); | 473 h264_accelerator_.reset(new V4L2H264Accelerator(this)); |
| 464 decoder_.reset(new H264Decoder(h264_accelerator_.get())); | 474 decoder_.reset(new H264Decoder(h264_accelerator_.get())); |
| 465 } else if (video_profile_ >= media::VP8PROFILE_MIN && | 475 } else if (video_profile_ >= media::VP8PROFILE_MIN && |
| 466 video_profile_ <= media::VP8PROFILE_MAX) { | 476 video_profile_ <= media::VP8PROFILE_MAX) { |
| 467 vp8_accelerator_.reset(new V4L2VP8Accelerator(this)); | 477 vp8_accelerator_.reset(new V4L2VP8Accelerator(this)); |
| 468 decoder_.reset(new VP8Decoder(vp8_accelerator_.get())); | 478 decoder_.reset(new VP8Decoder(vp8_accelerator_.get())); |
| 469 } else { | 479 } else { |
| 470 NOTREACHED() << "Unsupported profile " << video_profile_; | 480 NOTREACHED() << "Unsupported profile " << video_profile_; |
| 471 return false; | 481 return false; |
| 472 } | 482 } |
| 473 | 483 |
| 474 // TODO(posciak): This needs to be queried once supported. | 484 // TODO(posciak): This needs to be queried once supported. |
| 475 input_planes_count_ = 1; | 485 input_planes_count_ = 1; |
| 476 output_planes_count_ = 1; | 486 output_planes_count_ = 1; |
| 477 | 487 |
| 478 if (egl_display_ == EGL_NO_DISPLAY) { | 488 if (egl_display_ == EGL_NO_DISPLAY) { |
| 479 LOG(ERROR) << "Initialize(): could not get EGLDisplay"; | 489 LOG(ERROR) << "Initialize(): could not get EGLDisplay"; |
| 480 return false; | 490 return false; |
| 481 } | 491 } |
| 482 | 492 |
| 483 // We need the context to be initialized to query extensions. | 493 // We need the context to be initialized to query extensions. |
| 484 if (!make_context_current_.Run()) { | 494 if (!make_context_current_cb_.Run()) { |
| 485 LOG(ERROR) << "Initialize(): could not make context current"; | 495 LOG(ERROR) << "Initialize(): could not make context current"; |
| 486 return false; | 496 return false; |
| 487 } | 497 } |
| 488 | 498 |
| 489 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) { | 499 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) { |
| 490 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync"; | 500 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync"; |
| 491 return false; | 501 return false; |
| 492 } | 502 } |
| 493 | 503 |
| 494 // Capabilities check. | 504 // Capabilities check. |
| (...skipping 683 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1178 decoder_display_queue_.pop(); | 1188 decoder_display_queue_.pop(); |
| 1179 | 1189 |
| 1180 DVLOGF(3) << "Device poll stopped"; | 1190 DVLOGF(3) << "Device poll stopped"; |
| 1181 return true; | 1191 return true; |
| 1182 } | 1192 } |
| 1183 | 1193 |
| 1184 void V4L2SliceVideoDecodeAccelerator::Decode( | 1194 void V4L2SliceVideoDecodeAccelerator::Decode( |
| 1185 const media::BitstreamBuffer& bitstream_buffer) { | 1195 const media::BitstreamBuffer& bitstream_buffer) { |
| 1186 DVLOGF(3) << "input_id=" << bitstream_buffer.id() | 1196 DVLOGF(3) << "input_id=" << bitstream_buffer.id() |
| 1187 << ", size=" << bitstream_buffer.size(); | 1197 << ", size=" << bitstream_buffer.size(); |
| 1188 DCHECK(io_task_runner_->BelongsToCurrentThread()); | 1198 DCHECK(decode_task_runner_->BelongsToCurrentThread()); |
| 1189 | 1199 |
| 1190 if (bitstream_buffer.id() < 0) { | 1200 if (bitstream_buffer.id() < 0) { |
| 1191 LOG(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id(); | 1201 LOG(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id(); |
| 1192 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) | 1202 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) |
| 1193 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); | 1203 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); |
| 1194 NOTIFY_ERROR(INVALID_ARGUMENT); | 1204 NOTIFY_ERROR(INVALID_ARGUMENT); |
| 1195 return; | 1205 return; |
| 1196 } | 1206 } |
| 1197 | 1207 |
| 1198 decoder_thread_task_runner_->PostTask( | 1208 decoder_thread_task_runner_->PostTask( |
| 1199 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeTask, | 1209 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeTask, |
| 1200 base::Unretained(this), bitstream_buffer)); | 1210 base::Unretained(this), bitstream_buffer)); |
| 1201 } | 1211 } |
| 1202 | 1212 |
| 1203 void V4L2SliceVideoDecodeAccelerator::DecodeTask( | 1213 void V4L2SliceVideoDecodeAccelerator::DecodeTask( |
| 1204 const media::BitstreamBuffer& bitstream_buffer) { | 1214 const media::BitstreamBuffer& bitstream_buffer) { |
| 1205 DVLOGF(3) << "input_id=" << bitstream_buffer.id() | 1215 DVLOGF(3) << "input_id=" << bitstream_buffer.id() |
| 1206 << " size=" << bitstream_buffer.size(); | 1216 << " size=" << bitstream_buffer.size(); |
| 1207 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1217 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
| 1208 | 1218 |
| 1209 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef( | 1219 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef( |
| 1210 io_client_, io_task_runner_, | 1220 decode_client_, decode_task_runner_, |
| 1211 new SharedMemoryRegion(bitstream_buffer, true), bitstream_buffer.id())); | 1221 new SharedMemoryRegion(bitstream_buffer, true), bitstream_buffer.id())); |
| 1212 if (!bitstream_record->shm->Map()) { | 1222 if (!bitstream_record->shm->Map()) { |
| 1213 LOGF(ERROR) << "Could not map bitstream_buffer"; | 1223 LOGF(ERROR) << "Could not map bitstream_buffer"; |
| 1214 NOTIFY_ERROR(UNREADABLE_INPUT); | 1224 NOTIFY_ERROR(UNREADABLE_INPUT); |
| 1215 return; | 1225 return; |
| 1216 } | 1226 } |
| 1217 DVLOGF(3) << "mapped at=" << bitstream_record->shm->memory(); | 1227 DVLOGF(3) << "mapped at=" << bitstream_record->shm->memory(); |
| 1218 | 1228 |
| 1219 decoder_input_queue_.push( | 1229 decoder_input_queue_.push( |
| 1220 linked_ptr<BitstreamBufferRef>(bitstream_record.release())); | 1230 linked_ptr<BitstreamBufferRef>(bitstream_record.release())); |
| (...skipping 260 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1481 output_planes_count_)); | 1491 output_planes_count_)); |
| 1482 } | 1492 } |
| 1483 | 1493 |
| 1484 void V4L2SliceVideoDecodeAccelerator::CreateEGLImages( | 1494 void V4L2SliceVideoDecodeAccelerator::CreateEGLImages( |
| 1485 const std::vector<media::PictureBuffer>& buffers, | 1495 const std::vector<media::PictureBuffer>& buffers, |
| 1486 uint32_t output_format_fourcc, | 1496 uint32_t output_format_fourcc, |
| 1487 size_t output_planes_count) { | 1497 size_t output_planes_count) { |
| 1488 DVLOGF(3); | 1498 DVLOGF(3); |
| 1489 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 1499 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
| 1490 | 1500 |
| 1491 if (!make_context_current_.Run()) { | 1501 gfx::GLContext* gl_context = get_gl_context_cb_.Run(); |
| 1492 DLOG(ERROR) << "could not make context current"; | 1502 if (!gl_context || !make_context_current_cb_.Run()) { |
| 1503 DLOG(ERROR) << "No GL context"; |
| 1493 NOTIFY_ERROR(PLATFORM_FAILURE); | 1504 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 1494 return; | 1505 return; |
| 1495 } | 1506 } |
| 1496 | 1507 |
| 1497 gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0); | 1508 gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0); |
| 1498 | 1509 |
| 1499 std::vector<EGLImageKHR> egl_images; | 1510 std::vector<EGLImageKHR> egl_images; |
| 1500 for (size_t i = 0; i < buffers.size(); ++i) { | 1511 for (size_t i = 0; i < buffers.size(); ++i) { |
| 1501 EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_, | 1512 EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_, |
| 1502 egl_context_, | 1513 gl_context->GetHandle(), |
| 1503 buffers[i].texture_id(), | 1514 buffers[i].texture_id(), |
| 1504 buffers[i].size(), | 1515 buffers[i].size(), |
| 1505 i, | 1516 i, |
| 1506 output_format_fourcc, | 1517 output_format_fourcc, |
| 1507 output_planes_count); | 1518 output_planes_count); |
| 1508 if (egl_image == EGL_NO_IMAGE_KHR) { | 1519 if (egl_image == EGL_NO_IMAGE_KHR) { |
| 1509 LOGF(ERROR) << "Could not create EGLImageKHR"; | 1520 LOGF(ERROR) << "Could not create EGLImageKHR"; |
| 1510 for (const auto& image_to_destroy : egl_images) | 1521 for (const auto& image_to_destroy : egl_images) |
| 1511 device_->DestroyEGLImage(egl_display_, image_to_destroy); | 1522 device_->DestroyEGLImage(egl_display_, image_to_destroy); |
| 1512 | 1523 |
| (...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1558 } | 1569 } |
| 1559 | 1570 |
| 1560 ProcessPendingEventsIfNeeded(); | 1571 ProcessPendingEventsIfNeeded(); |
| 1561 } | 1572 } |
| 1562 | 1573 |
| 1563 void V4L2SliceVideoDecodeAccelerator::ReusePictureBuffer( | 1574 void V4L2SliceVideoDecodeAccelerator::ReusePictureBuffer( |
| 1564 int32_t picture_buffer_id) { | 1575 int32_t picture_buffer_id) { |
| 1565 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 1576 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
| 1566 DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id; | 1577 DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id; |
| 1567 | 1578 |
| 1568 if (!make_context_current_.Run()) { | 1579 if (!make_context_current_cb_.Run()) { |
| 1569 LOGF(ERROR) << "could not make context current"; | 1580 LOGF(ERROR) << "could not make context current"; |
| 1570 NOTIFY_ERROR(PLATFORM_FAILURE); | 1581 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 1571 return; | 1582 return; |
| 1572 } | 1583 } |
| 1573 | 1584 |
| 1574 EGLSyncKHR egl_sync = | 1585 EGLSyncKHR egl_sync = |
| 1575 eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL); | 1586 eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL); |
| 1576 if (egl_sync == EGL_NO_SYNC_KHR) { | 1587 if (egl_sync == EGL_NO_SYNC_KHR) { |
| 1577 LOGF(ERROR) << "eglCreateSyncKHR() failed"; | 1588 LOGF(ERROR) << "eglCreateSyncKHR() failed"; |
| 1578 NOTIFY_ERROR(PLATFORM_FAILURE); | 1589 NOTIFY_ERROR(PLATFORM_FAILURE); |
| (...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1634 | 1645 |
| 1635 void V4L2SliceVideoDecodeAccelerator::FlushTask() { | 1646 void V4L2SliceVideoDecodeAccelerator::FlushTask() { |
| 1636 DVLOGF(3); | 1647 DVLOGF(3); |
| 1637 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1648 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
| 1638 | 1649 |
| 1639 if (!decoder_input_queue_.empty()) { | 1650 if (!decoder_input_queue_.empty()) { |
| 1640 // We are not done with pending inputs, so queue an empty buffer, | 1651 // We are not done with pending inputs, so queue an empty buffer, |
| 1641 // which - when reached - will trigger flush sequence. | 1652 // which - when reached - will trigger flush sequence. |
| 1642 decoder_input_queue_.push( | 1653 decoder_input_queue_.push( |
| 1643 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef( | 1654 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef( |
| 1644 io_client_, io_task_runner_, nullptr, kFlushBufferId))); | 1655 decode_client_, decode_task_runner_, nullptr, kFlushBufferId))); |
| 1645 return; | 1656 return; |
| 1646 } | 1657 } |
| 1647 | 1658 |
| 1648 // No more inputs pending, so just finish flushing here. | 1659 // No more inputs pending, so just finish flushing here. |
| 1649 InitiateFlush(); | 1660 InitiateFlush(); |
| 1650 } | 1661 } |
| 1651 | 1662 |
| 1652 void V4L2SliceVideoDecodeAccelerator::InitiateFlush() { | 1663 void V4L2SliceVideoDecodeAccelerator::InitiateFlush() { |
| 1653 DVLOGF(3); | 1664 DVLOGF(3); |
| 1654 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1665 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
| (...skipping 890 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2545 } | 2556 } |
| 2546 | 2557 |
| 2547 void V4L2SliceVideoDecodeAccelerator::SendPictureReady() { | 2558 void V4L2SliceVideoDecodeAccelerator::SendPictureReady() { |
| 2548 DVLOGF(3); | 2559 DVLOGF(3); |
| 2549 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2560 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
| 2550 bool resetting_or_flushing = (decoder_resetting_ || decoder_flushing_); | 2561 bool resetting_or_flushing = (decoder_resetting_ || decoder_flushing_); |
| 2551 while (!pending_picture_ready_.empty()) { | 2562 while (!pending_picture_ready_.empty()) { |
| 2552 bool cleared = pending_picture_ready_.front().cleared; | 2563 bool cleared = pending_picture_ready_.front().cleared; |
| 2553 const media::Picture& picture = pending_picture_ready_.front().picture; | 2564 const media::Picture& picture = pending_picture_ready_.front().picture; |
| 2554 if (cleared && picture_clearing_count_ == 0) { | 2565 if (cleared && picture_clearing_count_ == 0) { |
| 2555 DVLOGF(4) << "Posting picture ready to IO for: " | 2566 DVLOGF(4) << "Posting picture ready to decode task runner for: " |
| 2556 << picture.picture_buffer_id(); | 2567 << picture.picture_buffer_id(); |
| 2557 // This picture is cleared. Post it to IO thread to reduce latency. This | 2568 // This picture is cleared. It can be posted to a thread different than |
| 2558 // should be the case after all pictures are cleared at the beginning. | 2569 // the main GPU thread to reduce latency. This should be the case after |
| 2559 io_task_runner_->PostTask( | 2570 // all pictures are cleared at the beginning. |
| 2560 FROM_HERE, base::Bind(&Client::PictureReady, io_client_, picture)); | 2571 decode_task_runner_->PostTask( |
| 2572 FROM_HERE, |
| 2573 base::Bind(&Client::PictureReady, decode_client_, picture)); |
| 2561 pending_picture_ready_.pop(); | 2574 pending_picture_ready_.pop(); |
| 2562 } else if (!cleared || resetting_or_flushing) { | 2575 } else if (!cleared || resetting_or_flushing) { |
| 2563 DVLOGF(3) << "cleared=" << pending_picture_ready_.front().cleared | 2576 DVLOGF(3) << "cleared=" << pending_picture_ready_.front().cleared |
| 2564 << ", decoder_resetting_=" << decoder_resetting_ | 2577 << ", decoder_resetting_=" << decoder_resetting_ |
| 2565 << ", decoder_flushing_=" << decoder_flushing_ | 2578 << ", decoder_flushing_=" << decoder_flushing_ |
| 2566 << ", picture_clearing_count_=" << picture_clearing_count_; | 2579 << ", picture_clearing_count_=" << picture_clearing_count_; |
| 2567 DVLOGF(4) << "Posting picture ready to GPU for: " | 2580 DVLOGF(4) << "Posting picture ready to GPU for: " |
| 2568 << picture.picture_buffer_id(); | 2581 << picture.picture_buffer_id(); |
| 2569 // If the picture is not cleared, post it to the child thread because it | 2582 // If the picture is not cleared, post it to the child thread because it |
| 2570 // has to be cleared in the child thread. A picture only needs to be | 2583 // has to be cleared in the child thread. A picture only needs to be |
| (...skipping 17 matching lines...) Expand all Loading... |
| 2588 } | 2601 } |
| 2589 | 2602 |
| 2590 void V4L2SliceVideoDecodeAccelerator::PictureCleared() { | 2603 void V4L2SliceVideoDecodeAccelerator::PictureCleared() { |
| 2591 DVLOGF(3) << "clearing count=" << picture_clearing_count_; | 2604 DVLOGF(3) << "clearing count=" << picture_clearing_count_; |
| 2592 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2605 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
| 2593 DCHECK_GT(picture_clearing_count_, 0); | 2606 DCHECK_GT(picture_clearing_count_, 0); |
| 2594 picture_clearing_count_--; | 2607 picture_clearing_count_--; |
| 2595 SendPictureReady(); | 2608 SendPictureReady(); |
| 2596 } | 2609 } |
| 2597 | 2610 |
| 2598 bool V4L2SliceVideoDecodeAccelerator::CanDecodeOnIOThread() { | 2611 bool V4L2SliceVideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread( |
| 2612 const base::WeakPtr<Client>& decode_client, |
| 2613 const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) { |
| 2614 decode_client_ = decode_client; |
| 2615 decode_task_runner_ = decode_task_runner; |
| 2599 return true; | 2616 return true; |
| 2600 } | 2617 } |
| 2601 | 2618 |
| 2602 // static | 2619 // static |
| 2603 media::VideoDecodeAccelerator::SupportedProfiles | 2620 media::VideoDecodeAccelerator::SupportedProfiles |
| 2604 V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles() { | 2621 V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles() { |
| 2605 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder); | 2622 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder); |
| 2606 if (!device) | 2623 if (!device) |
| 2607 return SupportedProfiles(); | 2624 return SupportedProfiles(); |
| 2608 | 2625 |
| 2609 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_), | 2626 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_), |
| 2610 supported_input_fourccs_); | 2627 supported_input_fourccs_); |
| 2611 } | 2628 } |
| 2612 | 2629 |
| 2613 } // namespace content | 2630 } // namespace content |
| OLD | NEW |