Index: content/common/gpu/media/android_video_decode_accelerator_base.cc
diff --git a/content/common/gpu/media/android_video_decode_accelerator.cc b/content/common/gpu/media/android_video_decode_accelerator_base.cc
similarity index 72%
copy from content/common/gpu/media/android_video_decode_accelerator.cc
copy to content/common/gpu/media/android_video_decode_accelerator_base.cc
index ce5c20cc3af754a5d08db5d7e766e6b78ec10723..ab243f9f0df9a17b53b593180ae9b17e5d7243bf 100644
--- a/content/common/gpu/media/android_video_decode_accelerator.cc
+++ b/content/common/gpu/media/android_video_decode_accelerator_base.cc
@@ -1,14 +1,15 @@
-// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Copyright (c) 2015 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

-#include "content/common/gpu/media/android_video_decode_accelerator.h"
+#include "content/common/gpu/media/android_video_decode_accelerator_base.h"

 #include "base/bind.h"
 #include "base/logging.h"
 #include "base/message_loop/message_loop.h"
 #include "base/metrics/histogram.h"
 #include "content/common/gpu/gpu_channel.h"
+#include "content/common/gpu/media/avda_return_on_failure.h"
 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
 #include "media/base/bitstream_buffer.h"
 #include "media/base/limits.h"
@@ -20,27 +21,6 @@

 namespace content {

-// Helper macros for dealing with failure. If |result| evaluates false, emit
-// |log| to ERROR, register |error| with the decoder, and return.
-#define RETURN_ON_FAILURE(result, log, error) \
-  do { \
-    if (!(result)) { \
-      DLOG(ERROR) << log; \
-      base::MessageLoop::current()->PostTask( \
-          FROM_HERE, \
-          base::Bind(&AndroidVideoDecodeAccelerator::NotifyError, \
-                     weak_this_factory_.GetWeakPtr(), \
-                     error)); \
-      state_ = ERROR; \
-      return; \
-    } \
-  } while (0)
-
-// TODO(dwkang): We only need kMaxVideoFrames to pass media stack's prerolling
-// phase, but 1 is added due to crbug.com/176036. This should be tuned when we
-// have actual use case.
-enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 };
-
 // Max number of bitstreams notified to the client with
 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream.
 enum { kMaxBitstreamsNotifiedInAdvance = 32 };
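
The RETURN_ON_FAILURE macro removed above is replaced by the new content/common/gpu/media/avda_return_on_failure.h include added in the first hunk. That header is not part of this excerpt, so its exact contents are an assumption; judging only from the three-argument call sites that remain in this file and from the PostError() helper added near the end of the patch, a minimal sketch of it could look like:

#ifndef CONTENT_COMMON_GPU_MEDIA_AVDA_RETURN_ON_FAILURE_H_
#define CONTENT_COMMON_GPU_MEDIA_AVDA_RETURN_ON_FAILURE_H_

#include "base/location.h"
#include "base/logging.h"

// Sketch only: if |result| evaluates false, log |log|, report |error| through
// the calling object's PostError(), and return from the calling function.
#define RETURN_ON_FAILURE(result, log, error) \
  do {                                        \
    if (!(result)) {                          \
      DLOG(ERROR) << log;                     \
      PostError(FROM_HERE, error);            \
      return;                                 \
    }                                         \
  } while (0)

#endif  // CONTENT_COMMON_GPU_MEDIA_AVDA_RETURN_ON_FAILURE_H_
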
@@ -87,7 +67,7 @@ static inline const base::TimeDelta NoWaitTimeOut() {
   return base::TimeDelta::FromMicroseconds(0);
 }

-AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
+AndroidVideoDecodeAcceleratorBase::AndroidVideoDecodeAcceleratorBase(
     const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
     const base::Callback<bool(void)>& make_context_current)
     : client_(NULL),
@@ -99,12 +79,13 @@ AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
       gl_decoder_(decoder),
       weak_this_factory_(this) {}

-AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
+AndroidVideoDecodeAcceleratorBase::~AndroidVideoDecodeAcceleratorBase() {
   DCHECK(thread_checker_.CalledOnValidThread());
 }

-bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
-                                               Client* client) {
+bool AndroidVideoDecodeAcceleratorBase::Initialize(
+    media::VideoCodecProfile profile,
+    Client* client) {
   DCHECK(!media_codec_);
   DCHECK(thread_checker_.CalledOnValidThread());

@@ -164,7 +145,7 @@ bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
   return true;
 }

-void AndroidVideoDecodeAccelerator::DoIOTask() {
+void AndroidVideoDecodeAcceleratorBase::DoIOTask() {
   DCHECK(thread_checker_.CalledOnValidThread());
   if (state_ == ERROR) {
     return;
@@ -174,7 +155,7 @@ void AndroidVideoDecodeAccelerator::DoIOTask() {
   DequeueOutput();
 }

-void AndroidVideoDecodeAccelerator::QueueInput() {
+void AndroidVideoDecodeAcceleratorBase::QueueInput() {
   DCHECK(thread_checker_.CalledOnValidThread());
   if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance)
     return;
@@ -233,13 +214,13 @@ void AndroidVideoDecodeAccelerator::QueueInput() {
   // TODO(dwkang): check if there is a way to remove this workaround.
   base::MessageLoop::current()->PostTask(
       FROM_HERE,
-      base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
+      base::Bind(&AndroidVideoDecodeAcceleratorBase::NotifyEndOfBitstreamBuffer,
                  weak_this_factory_.GetWeakPtr(),
                  bitstream_buffer.id()));
   bitstreams_notified_in_advance_.push_back(bitstream_buffer.id());
 }

-void AndroidVideoDecodeAccelerator::DequeueOutput() {
+void AndroidVideoDecodeAcceleratorBase::DequeueOutput() {
   DCHECK(thread_checker_.CalledOnValidThread());
   if (picturebuffers_requested_ && output_picture_buffers_.empty())
     return;
@@ -272,7 +253,8 @@ void AndroidVideoDecodeAccelerator::DequeueOutput() {
           size_ = gfx::Size(width, height);
           base::MessageLoop::current()->PostTask(
               FROM_HERE,
-              base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers,
+              base::Bind(&AndroidVideoDecodeAcceleratorBase::
+                             RequestPictureBuffers,
                          weak_this_factory_.GetWeakPtr()));
         } else {
           // Dynamic resolution change support is not specified by the Android
@@ -300,30 +282,21 @@ void AndroidVideoDecodeAccelerator::DequeueOutput() {
     }
   } while (buf_index < 0);

-  // This ignores the emitted ByteBuffer and instead relies on rendering to the
-  // codec's SurfaceTexture and then copying from that texture to the client's
-  // PictureBuffer's texture. This means that each picture's data is written
-  // three times: once to the ByteBuffer, once to the SurfaceTexture, and once
-  // to the client's texture. It would be nicer to either:
-  // 1) Render directly to the client's texture from MediaCodec (one write); or
-  // 2) Upload the ByteBuffer to the client's texture (two writes).
-  // Unfortunately neither is possible:
-  // 1) MediaCodec's use of SurfaceTexture is a singleton, and the texture
-  //    written to can't change during the codec's lifetime. b/11990461
-  // 2) The ByteBuffer is likely to contain the pixels in a vendor-specific,
-  //    opaque/non-standard format. It's not possible to negotiate the decoder
-  //    to emit a specific colorspace, even using HW CSC. b/10706245
-  // So, we live with these two extra copies per picture :(
-  media_codec_->ReleaseOutputBuffer(buf_index, true);
-
   if (eos) {
+    // TODO(liberato): Before refactoring into *Base, this was unconditionally
+    // done before the eos check, with render==true. However, since that
+    // frame wasn't sent anywhere in the eos case, we now do it here with
+    // render==false. We need to see if eos can actually deliver a valid
+    // frame with it.
+    media_codec_->ReleaseOutputBuffer(buf_index, false);
     base::MessageLoop::current()->PostTask(
         FROM_HERE,
-        base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone,
+        base::Bind(&AndroidVideoDecodeAcceleratorBase::NotifyFlushDone,
                    weak_this_factory_.GetWeakPtr()));
   } else {
     int64 bitstream_buffer_id = timestamp.InMicroseconds();
-    SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id));
+    SendCurrentSurfaceToClient(buf_index,
+                               static_cast<int32>(bitstream_buffer_id));

     // Removes ids former or equal than the id from decoder. Note that
     // |bitstreams_notified_in_advance_| does not mean bitstream ids in decoder
@@ -342,7 +315,8 @@ void AndroidVideoDecodeAccelerator::DequeueOutput() {
   }
 }

-void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient(
+void AndroidVideoDecodeAcceleratorBase::SendCurrentSurfaceToClient(
+    int32 codec_buffer_index,
     int32 bitstream_id) {
   DCHECK(thread_checker_.CalledOnValidThread());
   DCHECK_NE(bitstream_id, -1);
@@ -355,67 +329,32 @@ void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient(
   int32 picture_buffer_id = free_picture_ids_.front();
   free_picture_ids_.pop();

-  float transfrom_matrix[16];
-  surface_texture_->UpdateTexImage();
-  surface_texture_->GetTransformMatrix(transfrom_matrix);
-
   OutputBufferMap::const_iterator i =
       output_picture_buffers_.find(picture_buffer_id);
   RETURN_ON_FAILURE(i != output_picture_buffers_.end(),
                     "Can't find a PictureBuffer for " << picture_buffer_id,
                     PLATFORM_FAILURE);
-  uint32 picture_buffer_texture_id = i->second.texture_id();
-
-  RETURN_ON_FAILURE(gl_decoder_.get(),
-                    "Failed to get gles2 decoder instance.",
-                    ILLEGAL_STATE);
-  // Defer initializing the CopyTextureCHROMIUMResourceManager until it is
-  // needed because it takes 10s of milliseconds to initialize.
-  if (!copier_) {
-    copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager());
-    copier_->Initialize(gl_decoder_.get());
-  }

-  // Here, we copy |surface_texture_id_| to the picture buffer instead of
-  // setting new texture to |surface_texture_| by calling attachToGLContext()
-  // because:
-  // 1. Once we call detachFrameGLContext(), it deletes the texture previous
-  //    attached.
-  // 2. SurfaceTexture requires us to apply a transform matrix when we show
-  //    the texture.
-  // TODO(hkuang): get the StreamTexture transform matrix in GPU process
-  // instead of using default matrix crbug.com/226218.
-  const static GLfloat default_matrix[16] = {1.0f, 0.0f, 0.0f, 0.0f,
-                                             0.0f, 1.0f, 0.0f, 0.0f,
-                                             0.0f, 0.0f, 1.0f, 0.0f,
-                                             0.0f, 0.0f, 0.0f, 1.0f};
-  copier_->DoCopyTextureWithTransform(gl_decoder_.get(),
-                                      GL_TEXTURE_EXTERNAL_OES,
-                                      surface_texture_id_,
-                                      picture_buffer_texture_id,
-                                      size_.width(),
-                                      size_.height(),
-                                      false,
-                                      false,
-                                      false,
-                                      default_matrix);
+  AssignCurrentSurfaceToPictureBuffer(codec_buffer_index, i->second);

   // TODO(henryhsu): Pass (0, 0) as visible size will cause several test
   // cases failed. We should make sure |size_| is coded size or visible size.
   base::MessageLoop::current()->PostTask(
-      FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyPictureReady,
-                            weak_this_factory_.GetWeakPtr(),
-                            media::Picture(picture_buffer_id, bitstream_id,
-                                           gfx::Rect(size_), false)));
+      FROM_HERE,
+      base::Bind(&AndroidVideoDecodeAcceleratorBase::NotifyPictureReady,
+                 weak_this_factory_.GetWeakPtr(),
+                 media::Picture(picture_buffer_id, bitstream_id,
                                 gfx::Rect(size_), false)));
 }

-void AndroidVideoDecodeAccelerator::Decode(
+void AndroidVideoDecodeAcceleratorBase::Decode(
     const media::BitstreamBuffer& bitstream_buffer) {
   DCHECK(thread_checker_.CalledOnValidThread());
   if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) {
     base::MessageLoop::current()->PostTask(
         FROM_HERE,
-        base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
+        base::Bind(&AndroidVideoDecodeAcceleratorBase::
+                       NotifyEndOfBitstreamBuffer,
                    weak_this_factory_.GetWeakPtr(),
                    bitstream_buffer.id()));
     return;
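
The texture-copy path removed above (UpdateTexImage(), CopyTextureCHROMIUMResourceManager, the identity transform matrix) is no longer the base class's business: DequeueOutput() now forwards the MediaCodec output buffer index, and SendCurrentSurfaceToClient() hands it to the AssignCurrentSurfaceToPictureBuffer() hook. The concrete subclass is not shown in this excerpt, so as an illustration only — the class name, the |copier_| member (e.g. a scoped_ptr<gpu::CopyTextureCHROMIUMResourceManager>), and the parameter list inferred from the call site are all assumptions — a copying subclass could reconstitute the removed behaviour on top of the accessors this patch adds, roughly:

void AndroidCopyingVideoDecodeAccelerator::AssignCurrentSurfaceToPictureBuffer(
    int32 codec_buffer_index,
    const media::PictureBuffer& picture_buffer) {
  // Render the decoded frame to the codec's SurfaceTexture, as the removed
  // unconditional ReleaseOutputBuffer(buf_index, true) call used to do.
  GetMediaCodec()->ReleaseOutputBuffer(codec_buffer_index, true);
  GetSurfaceTexture()->UpdateTexImage();

  // Lazily create the copier, since initialization is expensive.
  if (!copier_) {
    copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager());
    copier_->Initialize(GetGlDecoder());
  }

  // As in the removed code, copy with an identity matrix rather than the
  // SurfaceTexture transform (crbug.com/226218).
  static const GLfloat kIdentityMatrix[16] = {1.0f, 0.0f, 0.0f, 0.0f,
                                              0.0f, 1.0f, 0.0f, 0.0f,
                                              0.0f, 0.0f, 1.0f, 0.0f,
                                              0.0f, 0.0f, 0.0f, 1.0f};
  copier_->DoCopyTextureWithTransform(
      GetGlDecoder(), GL_TEXTURE_EXTERNAL_OES, GetSurfaceTextureId(),
      picture_buffer.texture_id(), GetSize().width(), GetSize().height(),
      false, false, false, kIdentityMatrix);
}
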
@@ -427,7 +366,12 @@ void AndroidVideoDecodeAccelerator::Decode(
   DoIOTask();
 }

-void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
+void AndroidVideoDecodeAcceleratorBase::RequestPictureBuffers() {
+  client_->ProvidePictureBuffers(GetNumPictureBuffers(), size_,
+                                 GetTextureTarget());
+}
+
+void AndroidVideoDecodeAcceleratorBase::AssignPictureBuffers(
     const std::vector<media::PictureBuffer>& buffers) {
   DCHECK(thread_checker_.CalledOnValidThread());
   DCHECK(output_picture_buffers_.empty());
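
GetNumPictureBuffers() and GetTextureTarget(), like AssignCurrentSurfaceToPictureBuffer(), are not defined anywhere in this file, so they are presumably declared in android_video_decode_accelerator_base.h for subclasses to supply. That header is not in this excerpt; judging only from the call sites here (return types, constness, and pure-virtual-ness are assumptions), the hooks would look roughly like this:

// Sketch (not from the patch): possible declarations inside the
// AndroidVideoDecodeAcceleratorBase class in
// android_video_decode_accelerator_base.h; other members elided.
 protected:
  // How many PictureBuffers to request via Client::ProvidePictureBuffers().
  virtual uint32 GetNumPictureBuffers() const = 0;

  // Texture target of the PictureBuffers handed to the client, e.g.
  // GL_TEXTURE_2D for a copying implementation.
  virtual uint32 GetTextureTarget() const = 0;

  // Make the frame in MediaCodec output buffer |codec_buffer_index| visible
  // through |picture_buffer|'s texture.
  virtual void AssignCurrentSurfaceToPictureBuffer(
      int32 codec_buffer_index,
      const media::PictureBuffer& picture_buffer) = 0;
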
@@ -446,14 +390,14 @@ void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
     dismissed_picture_ids_.erase(id);
   }

-  RETURN_ON_FAILURE(output_picture_buffers_.size() >= kNumPictureBuffers,
+  RETURN_ON_FAILURE(output_picture_buffers_.size() >= GetNumPictureBuffers(),
                     "Invalid picture buffers were passed.",
                     INVALID_ARGUMENT);

   DoIOTask();
 }

-void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
+void AndroidVideoDecodeAcceleratorBase::ReusePictureBuffer(
     int32 picture_buffer_id) {
   DCHECK(thread_checker_.CalledOnValidThread());

@@ -469,13 +413,13 @@ void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
   DoIOTask();
 }

-void AndroidVideoDecodeAccelerator::Flush() {
+void AndroidVideoDecodeAcceleratorBase::Flush() {
   DCHECK(thread_checker_.CalledOnValidThread());

   Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0));
 }

-bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
+bool AndroidVideoDecodeAcceleratorBase::ConfigureMediaCodec() {
   DCHECK(thread_checker_.CalledOnValidThread());
   DCHECK(surface_texture_.get());

@@ -491,11 +435,11 @@ bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
   io_timer_.Start(FROM_HERE,
                   DecodePollDelay(),
                   this,
-                  &AndroidVideoDecodeAccelerator::DoIOTask);
+                  &AndroidVideoDecodeAcceleratorBase::DoIOTask);
   return true;
 }

-void AndroidVideoDecodeAccelerator::Reset() {
+void AndroidVideoDecodeAcceleratorBase::Reset() {
   DCHECK(thread_checker_.CalledOnValidThread());

   while (!pending_bitstream_buffers_.empty()) {
@@ -505,7 +449,8 @@ void AndroidVideoDecodeAccelerator::Reset() {
     if (bitstream_buffer_id != -1) {
       base::MessageLoop::current()->PostTask(
           FROM_HERE,
-          base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
+          base::Bind(&AndroidVideoDecodeAcceleratorBase::
+                         NotifyEndOfBitstreamBuffer,
                      weak_this_factory_.GetWeakPtr(),
                      bitstream_buffer_id));
     }
@@ -535,11 +480,11 @@ void AndroidVideoDecodeAccelerator::Reset() {

   base::MessageLoop::current()->PostTask(
       FROM_HERE,
-      base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone,
+      base::Bind(&AndroidVideoDecodeAcceleratorBase::NotifyResetDone,
                  weak_this_factory_.GetWeakPtr()));
 }

-void AndroidVideoDecodeAccelerator::Destroy() {
+void AndroidVideoDecodeAcceleratorBase::Destroy() {
   DCHECK(thread_checker_.CalledOnValidThread());

   weak_this_factory_.InvalidateWeakPtrs();
@@ -549,45 +494,81 @@ void AndroidVideoDecodeAccelerator::Destroy() {
   }
   if (surface_texture_id_)
     glDeleteTextures(1, &surface_texture_id_);
-  if (copier_)
-    copier_->Destroy();
   delete this;
 }

-bool AndroidVideoDecodeAccelerator::CanDecodeOnIOThread() {
+bool AndroidVideoDecodeAcceleratorBase::CanDecodeOnIOThread() {
   return false;
 }

-void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {
-  client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D);
+media::VideoDecodeAccelerator::Client*
+AndroidVideoDecodeAcceleratorBase::GetClient() const {
+  return client_;
+}
+
+const gfx::Size& AndroidVideoDecodeAcceleratorBase::GetSize() const {
+  return size_;
+}
+
+const base::ThreadChecker&
+AndroidVideoDecodeAcceleratorBase::ThreadChecker() const {
+  return thread_checker_;
+}
+
+gfx::SurfaceTexture*
+AndroidVideoDecodeAcceleratorBase::GetSurfaceTexture() const {
+  return surface_texture_.get();
+}
+
+uint32 AndroidVideoDecodeAcceleratorBase::GetSurfaceTextureId() const {
+  return surface_texture_id_;
+}
+
+gpu::gles2::GLES2Decoder*
+AndroidVideoDecodeAcceleratorBase::GetGlDecoder() const {
+  return gl_decoder_.get();
+}
+
+media::VideoCodecBridge* AndroidVideoDecodeAcceleratorBase::GetMediaCodec() {
+  return media_codec_.get();
+}
+
+void AndroidVideoDecodeAcceleratorBase::PostError(
+    const ::tracked_objects::Location& from_here,
+    media::VideoDecodeAccelerator::Error error) {
+  base::MessageLoop::current()->PostTask(from_here,
+      base::Bind(&AndroidVideoDecodeAcceleratorBase::NotifyError,
+                 weak_this_factory_.GetWeakPtr(),
+                 error));
+  state_ = ERROR;
 }

-void AndroidVideoDecodeAccelerator::NotifyPictureReady(
+void AndroidVideoDecodeAcceleratorBase::NotifyPictureReady(
     const media::Picture& picture) {
   client_->PictureReady(picture);
 }

-void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer(
+void AndroidVideoDecodeAcceleratorBase::NotifyEndOfBitstreamBuffer(
     int input_buffer_id) {
   client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
 }

-void AndroidVideoDecodeAccelerator::NotifyFlushDone() {
+void AndroidVideoDecodeAcceleratorBase::NotifyFlushDone() {
   client_->NotifyFlushDone();
 }

-void AndroidVideoDecodeAccelerator::NotifyResetDone() {
+void AndroidVideoDecodeAcceleratorBase::NotifyResetDone() {
   client_->NotifyResetDone();
 }

-void AndroidVideoDecodeAccelerator::NotifyError(
+void AndroidVideoDecodeAcceleratorBase::NotifyError(
     media::VideoDecodeAccelerator::Error error) {
   client_->NotifyError(error);
 }

 // static
 media::VideoDecodeAccelerator::SupportedProfiles
-AndroidVideoDecodeAccelerator::GetSupportedProfiles() {
+AndroidVideoDecodeAcceleratorBase::GetSupportedProfiles() {
   SupportedProfiles profiles;

   if (!media::VideoCodecBridge::IsKnownUnaccelerated(
|