Index: content/common/gpu/media/android_video_decode_accelerator_impl.cc
diff --git a/content/common/gpu/media/android_video_decode_accelerator.cc b/content/common/gpu/media/android_video_decode_accelerator_impl.cc
similarity index 71%
rename from content/common/gpu/media/android_video_decode_accelerator.cc
rename to content/common/gpu/media/android_video_decode_accelerator_impl.cc
index d5687cf8b5fba9e217d92ed73cdcefbce855b008..ea7c411b2a234ba358a01a7ec2c81c1824e7ad05 100644
--- a/content/common/gpu/media/android_video_decode_accelerator.cc
+++ b/content/common/gpu/media/android_video_decode_accelerator_impl.cc
@@ -1,14 +1,15 @@
-// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Copyright (c) 2015 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
-#include "content/common/gpu/media/android_video_decode_accelerator.h"
+#include "content/common/gpu/media/android_video_decode_accelerator_impl.h"
 #include "base/bind.h"
 #include "base/logging.h"
 #include "base/message_loop/message_loop.h"
 #include "base/metrics/histogram.h"
 #include "content/common/gpu/gpu_channel.h"
+#include "content/common/gpu/media/avda_return_on_failure.h"
 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
 #include "media/base/bitstream_buffer.h"
 #include "media/base/limits.h"
@@ -20,26 +21,25 @@
 namespace content {
-// Helper macros for dealing with failure. If |result| evaluates false, emit
-// |log| to ERROR, register |error| with the decoder, and return.
-#define RETURN_ON_FAILURE(result, log, error) \
-  do { \
-    if (!(result)) { \
-      DLOG(ERROR) << log; \
-      base::MessageLoop::current()->PostTask( \
-          FROM_HERE, \
-          base::Bind(&AndroidVideoDecodeAccelerator::NotifyError, \
-                     weak_this_factory_.GetWeakPtr(), \
-                     error)); \
-      state_ = ERROR; \
-      return; \
-    } \
-  } while (0)
-
-// TODO(dwkang): We only need kMaxVideoFrames to pass media stack's prerolling
-// phase, but 1 is added due to crbug.com/176036. This should be tuned when we
-// have actual use case.
-enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 };
+class InstantiatableAndroidVideoDecodeAcceleratorImpl
+    : public AndroidVideoDecodeAcceleratorImpl {
+ public:
+  InstantiatableAndroidVideoDecodeAcceleratorImpl(
+      const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
+      const base::Callback<bool(void)>& make_context_current,
+      scoped_refptr<BackingStrategy> strategy) :
+      AndroidVideoDecodeAcceleratorImpl(decoder, make_context_current,
+                                        strategy) {}
+};
+
+AndroidVideoDecodeAccelerator* AndroidVideoDecodeAccelerator::Create(
+    const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
+    const base::Callback<bool(void)>& make_context_current,
+    scoped_refptr<AndroidVideoDecodeAccelerator::BackingStrategy> strategy)
+{
+  return new InstantiatableAndroidVideoDecodeAcceleratorImpl(decoder,
+      make_context_current, strategy);
+}
 // Max number of bitstreams notified to the client with
 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream.
@@ -87,9 +87,10 @@ static inline const base::TimeDelta NoWaitTimeOut() {
   return base::TimeDelta::FromMicroseconds(0);
 }
-AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
+AndroidVideoDecodeAcceleratorImpl::AndroidVideoDecodeAcceleratorImpl(
     const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
-    const base::Callback<bool(void)>& make_context_current)
+    const base::Callback<bool(void)>& make_context_current,
+    scoped_refptr<BackingStrategy> strategy)
     : client_(NULL),
       make_context_current_(make_context_current),
       codec_(media::kCodecH264),
@@ -97,20 +98,24 @@ AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
       surface_texture_id_(0),
       picturebuffers_requested_(false),
       gl_decoder_(decoder),
+      strategy_(strategy),
       weak_this_factory_(this) {}
-AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
+AndroidVideoDecodeAcceleratorImpl::~AndroidVideoDecodeAcceleratorImpl() {
   DCHECK(thread_checker_.CalledOnValidThread());
 }
-bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
-                                               Client* client) {
+bool AndroidVideoDecodeAcceleratorImpl::Initialize(
+    media::VideoCodecProfile profile,
+    Client* client) {
   DCHECK(!media_codec_);
   DCHECK(thread_checker_.CalledOnValidThread());
   client_ = client;
   codec_ = VideoCodecProfileToVideoCodec(profile);
+  strategy_->SetStateProvider(this);
+
   bool profile_supported = codec_ == media::kCodecVP8;
 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID)
   profile_supported |=
@@ -164,7 +169,7 @@ bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
   return true;
 }
-void AndroidVideoDecodeAccelerator::DoIOTask() {
+void AndroidVideoDecodeAcceleratorImpl::DoIOTask() {
   DCHECK(thread_checker_.CalledOnValidThread());
   if (state_ == ERROR) {
     return;
@@ -174,7 +179,7 @@ void AndroidVideoDecodeAccelerator::DoIOTask() {
   DequeueOutput();
 }
-void AndroidVideoDecodeAccelerator::QueueInput() {
+void AndroidVideoDecodeAcceleratorImpl::QueueInput() {
   DCHECK(thread_checker_.CalledOnValidThread());
   if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance)
     return;
@@ -211,7 +216,8 @@ void AndroidVideoDecodeAccelerator::QueueInput() {
   scoped_ptr<base::SharedMemory> shm(
       new base::SharedMemory(bitstream_buffer.handle(), true));
-  RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
+  RETURN_ON_FAILURE(this,
+                    shm->Map(bitstream_buffer.size()),
                     "Failed to SharedMemory::Map()",
                     UNREADABLE_INPUT);
@@ -220,7 +226,8 @@ void AndroidVideoDecodeAccelerator::QueueInput() {
       static_cast<const uint8*>(shm->memory()),
       bitstream_buffer.size(),
       timestamp);
-  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
+  RETURN_ON_FAILURE(this,
+                    status == media::MEDIA_CODEC_OK,
                     "Failed to QueueInputBuffer: " << status,
                     PLATFORM_FAILURE);
@@ -233,13 +240,13 @@ void AndroidVideoDecodeAccelerator::QueueInput() {
   // TODO(dwkang): check if there is a way to remove this workaround.
   base::MessageLoop::current()->PostTask(
       FROM_HERE,
-      base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
+      base::Bind(&AndroidVideoDecodeAcceleratorImpl::NotifyEndOfBitstreamBuffer,
                  weak_this_factory_.GetWeakPtr(),
                  bitstream_buffer.id()));
   bitstreams_notified_in_advance_.push_back(bitstream_buffer.id());
 }
-void AndroidVideoDecodeAccelerator::DequeueOutput() {
+void AndroidVideoDecodeAcceleratorImpl::DequeueOutput() {
   DCHECK(thread_checker_.CalledOnValidThread());
   if (picturebuffers_requested_ && output_picture_buffers_.empty())
     return;
@@ -272,7 +279,8 @@ void AndroidVideoDecodeAccelerator::DequeueOutput() {
           size_ = gfx::Size(width, height);
           base::MessageLoop::current()->PostTask(
               FROM_HERE,
-              base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers,
+              base::Bind(&AndroidVideoDecodeAcceleratorImpl::
+                         RequestPictureBuffers,
                          weak_this_factory_.GetWeakPtr()));
         } else {
           // Dynamic resolution change support is not specified by the Android
@@ -280,7 +288,8 @@ void AndroidVideoDecodeAccelerator::DequeueOutput() {
           // continue playback at this point. Instead, error out immediately,
           // expecting clients to Reset() as appropriate to avoid this.
           // b/7093648
-          RETURN_ON_FAILURE(size_ == gfx::Size(width, height),
+          RETURN_ON_FAILURE(this,
+                            size_ == gfx::Size(width, height),
                             "Dynamic resolution change is not supported.",
                             PLATFORM_FAILURE);
         }
@@ -300,30 +309,16 @@ void AndroidVideoDecodeAccelerator::DequeueOutput() {
     }
   } while (buf_index < 0);
-  // This ignores the emitted ByteBuffer and instead relies on rendering to the
-  // codec's SurfaceTexture and then copying from that texture to the client's
-  // PictureBuffer's texture. This means that each picture's data is written
-  // three times: once to the ByteBuffer, once to the SurfaceTexture, and once
-  // to the client's texture. It would be nicer to either:
-  // 1) Render directly to the client's texture from MediaCodec (one write); or
-  // 2) Upload the ByteBuffer to the client's texture (two writes).
-  // Unfortunately neither is possible:
-  // 1) MediaCodec's use of SurfaceTexture is a singleton, and the texture
-  //    written to can't change during the codec's lifetime. b/11990461
-  // 2) The ByteBuffer is likely to contain the pixels in a vendor-specific,
-  //    opaque/non-standard format. It's not possible to negotiate the decoder
-  //    to emit a specific colorspace, even using HW CSC. b/10706245
-  // So, we live with these two extra copies per picture :(
-  media_codec_->ReleaseOutputBuffer(buf_index, true);
-
   if (eos) {
+    media_codec_->ReleaseOutputBuffer(buf_index, false);
     base::MessageLoop::current()->PostTask(
         FROM_HERE,
-        base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone,
+        base::Bind(&AndroidVideoDecodeAcceleratorImpl::NotifyFlushDone,
                    weak_this_factory_.GetWeakPtr()));
   } else {
     int64 bitstream_buffer_id = timestamp.InMicroseconds();
-    SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id));
+    SendCurrentSurfaceToClient(buf_index,
+                               static_cast<int32>(bitstream_buffer_id));
     // Removes ids former or equal than the id from decoder. Note that
     // |bitstreams_notified_in_advance_| does not mean bitstream ids in decoder
@@ -342,75 +337,51 @@ void AndroidVideoDecodeAccelerator::DequeueOutput() {
   }
 }
-void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient(
+void AndroidVideoDecodeAcceleratorImpl::SendCurrentSurfaceToClient(
+    int32 codec_buffer_index,
     int32 bitstream_id) {
   DCHECK(thread_checker_.CalledOnValidThread());
   DCHECK_NE(bitstream_id, -1);
   DCHECK(!free_picture_ids_.empty());
-  RETURN_ON_FAILURE(make_context_current_.Run(),
+  RETURN_ON_FAILURE(this,
+                    make_context_current_.Run(),
                     "Failed to make this decoder's GL context current.",
                     PLATFORM_FAILURE);
   int32 picture_buffer_id = free_picture_ids_.front();
   free_picture_ids_.pop();
-  float transfrom_matrix[16];
-  surface_texture_->UpdateTexImage();
-  surface_texture_->GetTransformMatrix(transfrom_matrix);
-
   OutputBufferMap::const_iterator i =
       output_picture_buffers_.find(picture_buffer_id);
-  RETURN_ON_FAILURE(i != output_picture_buffers_.end(),
+  RETURN_ON_FAILURE(this,
+                    i != output_picture_buffers_.end(),
                     "Can't find a PictureBuffer for " << picture_buffer_id,
                     PLATFORM_FAILURE);
-  uint32 picture_buffer_texture_id = i->second.texture_id();
-
-  RETURN_ON_FAILURE(gl_decoder_.get(),
-                    "Failed to get gles2 decoder instance.",
-                    ILLEGAL_STATE);
-  // Defer initializing the CopyTextureCHROMIUMResourceManager until it is
-  // needed because it takes 10s of milliseconds to initialize.
-  if (!copier_) {
-    copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager());
-    copier_->Initialize(gl_decoder_.get());
-  }
-  // Here, we copy |surface_texture_id_| to the picture buffer instead of
-  // setting new texture to |surface_texture_| by calling attachToGLContext()
-  // because:
-  // 1. Once we call detachFrameGLContext(), it deletes the texture previous
-  //    attached.
-  // 2. SurfaceTexture requires us to apply a transform matrix when we show
-  //    the texture.
-  // TODO(hkuang): get the StreamTexture transform matrix in GPU process
-  // instead of using default matrix crbug.com/226218.
-  const static GLfloat default_matrix[16] = {1.0f, 0.0f, 0.0f, 0.0f,
-                                             0.0f, 1.0f, 0.0f, 0.0f,
-                                             0.0f, 0.0f, 1.0f, 0.0f,
-                                             0.0f, 0.0f, 0.0f, 1.0f};
-  copier_->DoCopyTextureWithTransform(
-      gl_decoder_.get(), GL_TEXTURE_EXTERNAL_OES, surface_texture_id_,
-      GL_TEXTURE_2D, picture_buffer_texture_id, GL_RGBA, GL_UNSIGNED_BYTE,
-      size_.width(), size_.height(), false, false, false, nullptr,
-      default_matrix);
+  // Connect the PictureBuffer to the decoded frame, via whatever
+  // mechanism the strategy likes.
+  strategy_->AssignCurrentSurfaceToPictureBuffer(codec_buffer_index,
+                                                 i->second);
   // TODO(henryhsu): Pass (0, 0) as visible size will cause several test
   // cases failed. We should make sure |size_| is coded size or visible size.
   base::MessageLoop::current()->PostTask(
-      FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyPictureReady,
-                            weak_this_factory_.GetWeakPtr(),
-                            media::Picture(picture_buffer_id, bitstream_id,
-                                           gfx::Rect(size_), false)));
+      FROM_HERE,
+      base::Bind(&AndroidVideoDecodeAcceleratorImpl::NotifyPictureReady,
+                 weak_this_factory_.GetWeakPtr(),
+                 media::Picture(picture_buffer_id, bitstream_id,
                                 gfx::Rect(size_), false)));
 }
-void AndroidVideoDecodeAccelerator::Decode(
+void AndroidVideoDecodeAcceleratorImpl::Decode(
     const media::BitstreamBuffer& bitstream_buffer) {
   DCHECK(thread_checker_.CalledOnValidThread());
   if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) {
     base::MessageLoop::current()->PostTask(
         FROM_HERE,
-        base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
+        base::Bind(&AndroidVideoDecodeAcceleratorImpl::
+                   NotifyEndOfBitstreamBuffer,
                    weak_this_factory_.GetWeakPtr(),
                    bitstream_buffer.id()));
     return;
@@ -422,14 +393,20 @@ void AndroidVideoDecodeAccelerator::Decode(
   DoIOTask();
 }
-void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
+void AndroidVideoDecodeAcceleratorImpl::RequestPictureBuffers() {
+  client_->ProvidePictureBuffers(strategy_->GetNumPictureBuffers(),
+                                 size_, strategy_->GetTextureTarget());
+}
+
+void AndroidVideoDecodeAcceleratorImpl::AssignPictureBuffers(
     const std::vector<media::PictureBuffer>& buffers) {
   DCHECK(thread_checker_.CalledOnValidThread());
   DCHECK(output_picture_buffers_.empty());
   DCHECK(free_picture_ids_.empty());
   for (size_t i = 0; i < buffers.size(); ++i) {
-    RETURN_ON_FAILURE(buffers[i].size() == size_,
+    RETURN_ON_FAILURE(this,
+                      buffers[i].size() == size_,
                       "Invalid picture buffer size was passed.",
                       INVALID_ARGUMENT);
     int32 id = buffers[i].id();
@@ -441,14 +418,16 @@ void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
     dismissed_picture_ids_.erase(id);
   }
-  RETURN_ON_FAILURE(output_picture_buffers_.size() >= kNumPictureBuffers,
+  RETURN_ON_FAILURE(this,
+                    output_picture_buffers_.size() >=
+                        strategy_->GetNumPictureBuffers(),
                     "Invalid picture buffers were passed.",
                     INVALID_ARGUMENT);
   DoIOTask();
 }
-void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
+void AndroidVideoDecodeAcceleratorImpl::ReusePictureBuffer(
     int32 picture_buffer_id) {
   DCHECK(thread_checker_.CalledOnValidThread());
@@ -464,13 +443,13 @@ void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
   DoIOTask();
 }
-void AndroidVideoDecodeAccelerator::Flush() {
+void AndroidVideoDecodeAcceleratorImpl::Flush() {
   DCHECK(thread_checker_.CalledOnValidThread());
   Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0));
 }
-bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
+bool AndroidVideoDecodeAcceleratorImpl::ConfigureMediaCodec() {
   DCHECK(thread_checker_.CalledOnValidThread());
   DCHECK(surface_texture_.get());
@@ -486,11 +465,11 @@ bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
   io_timer_.Start(FROM_HERE,
                   DecodePollDelay(),
                   this,
-                  &AndroidVideoDecodeAccelerator::DoIOTask);
+                  &AndroidVideoDecodeAcceleratorImpl::DoIOTask);
   return true;
 }
-void AndroidVideoDecodeAccelerator::Reset() {
+void AndroidVideoDecodeAcceleratorImpl::Reset() {
   DCHECK(thread_checker_.CalledOnValidThread());
   while (!pending_bitstream_buffers_.empty()) {
@@ -500,7 +479,8 @@ void AndroidVideoDecodeAccelerator::Reset() {
     if (bitstream_buffer_id != -1) {
       base::MessageLoop::current()->PostTask(
           FROM_HERE,
-          base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
+          base::Bind(&AndroidVideoDecodeAcceleratorImpl::
+                     NotifyEndOfBitstreamBuffer,
                      weak_this_factory_.GetWeakPtr(),
                      bitstream_buffer_id));
     }
@@ -530,13 +510,15 @@ void AndroidVideoDecodeAccelerator::Reset() {
   base::MessageLoop::current()->PostTask(
       FROM_HERE,
-      base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone,
+      base::Bind(&AndroidVideoDecodeAcceleratorImpl::NotifyResetDone,
                  weak_this_factory_.GetWeakPtr()));
 }
-void AndroidVideoDecodeAccelerator::Destroy() {
+void AndroidVideoDecodeAcceleratorImpl::Destroy() {
   DCHECK(thread_checker_.CalledOnValidThread());
+  strategy_->Cleanup();
+
   weak_this_factory_.InvalidateWeakPtrs();
   if (media_codec_) {
     io_timer_.Stop();
@@ -544,38 +526,69 @@ void AndroidVideoDecodeAccelerator::Destroy() {
   }
   if (surface_texture_id_)
     glDeleteTextures(1, &surface_texture_id_);
-  if (copier_)
-    copier_->Destroy();
   delete this;
 }
-bool AndroidVideoDecodeAccelerator::CanDecodeOnIOThread() {
+bool AndroidVideoDecodeAcceleratorImpl::CanDecodeOnIOThread() {
   return false;
 }
-void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {
-  client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D);
+const gfx::Size& AndroidVideoDecodeAcceleratorImpl::GetSize() const {
+  return size_;
+}
+
+const base::ThreadChecker&
+AndroidVideoDecodeAcceleratorImpl::ThreadChecker() const {
+  return thread_checker_;
+}
+
+gfx::SurfaceTexture*
+AndroidVideoDecodeAcceleratorImpl::GetSurfaceTexture() const {
+  return surface_texture_.get();
+}
+
+uint32 AndroidVideoDecodeAcceleratorImpl::GetSurfaceTextureId() const {
+  return surface_texture_id_;
+}
+
+gpu::gles2::GLES2Decoder*
+AndroidVideoDecodeAcceleratorImpl::GetGlDecoder() const {
+  return gl_decoder_.get();
+}
+
+media::VideoCodecBridge* AndroidVideoDecodeAcceleratorImpl::GetMediaCodec() {
+  return media_codec_.get();
+}
+
+void AndroidVideoDecodeAcceleratorImpl::PostError(
+    const ::tracked_objects::Location& from_here,
+    media::VideoDecodeAccelerator::Error error) {
+  base::MessageLoop::current()->PostTask(from_here,
+      base::Bind(&AndroidVideoDecodeAcceleratorImpl::NotifyError,
+                 weak_this_factory_.GetWeakPtr(),
+                 error));
+  state_ = ERROR;
 }
-void AndroidVideoDecodeAccelerator::NotifyPictureReady(
+void AndroidVideoDecodeAcceleratorImpl::NotifyPictureReady(
     const media::Picture& picture) {
   client_->PictureReady(picture);
 }
-void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer(
+void AndroidVideoDecodeAcceleratorImpl::NotifyEndOfBitstreamBuffer(
    int input_buffer_id) {
   client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
 }
-void AndroidVideoDecodeAccelerator::NotifyFlushDone() {
+void AndroidVideoDecodeAcceleratorImpl::NotifyFlushDone() {
   client_->NotifyFlushDone();
 }
-void AndroidVideoDecodeAccelerator::NotifyResetDone() {
+void AndroidVideoDecodeAcceleratorImpl::NotifyResetDone() {
   client_->NotifyResetDone();
 }
-void AndroidVideoDecodeAccelerator::NotifyError(
+void AndroidVideoDecodeAcceleratorImpl::NotifyError(
     media::VideoDecodeAccelerator::Error error) {
   client_->NotifyError(error);
 }
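
The RETURN_ON_FAILURE macro deleted above now comes from the new
content/common/gpu/media/avda_return_on_failure.h include, and its call sites
pass the error-handling object (here |this|) as a first argument. That header
is not part of this diff; a minimal sketch of what it presumably contains,
reconstructed from the old macro body and the new PostError() method, is:

// Sketch only -- the real avda_return_on_failure.h is not shown in this patch.
// |error_handler| is any object exposing PostError(location, error); the call
// sites above pass the AndroidVideoDecodeAcceleratorImpl itself.
#define RETURN_ON_FAILURE(error_handler, result, log, error) \
  do {                                                        \
    if (!(result)) {                                          \
      DLOG(ERROR) << log;                                     \
      (error_handler)->PostError(FROM_HERE, error);           \
      return;                                                 \
    }                                                         \
  } while (0)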
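
The new BackingStrategy member is exercised through only a handful of calls in
this file: SetStateProvider() in Initialize(), GetNumPictureBuffers() and
GetTextureTarget() when requesting and validating picture buffers,
AssignCurrentSurfaceToPictureBuffer() in SendCurrentSurfaceToClient(), and
Cleanup() in Destroy(). Its declaration is outside this diff; a rough sketch of
the interface implied by those call sites (parameter types, constness, and the
ref-counting base are assumptions) is:

// Inferred from the call sites above; the actual declaration lives elsewhere
// and may differ.
class AndroidVideoDecodeAccelerator::BackingStrategy
    : public base::RefCounted<BackingStrategy> {
 public:
  // Called from Initialize() so the strategy can reach decoder state later
  // (GetSize(), GetSurfaceTexture(), GetGlDecoder(), PostError(), ...).
  virtual void SetStateProvider(
      AndroidVideoDecodeAcceleratorImpl* state_provider) = 0;

  // How many PictureBuffers to request and which GL texture target they bind.
  virtual uint32 GetNumPictureBuffers() const = 0;
  virtual uint32 GetTextureTarget() const = 0;

  // Make the decoded frame held in |codec_buffer_index| visible through
  // |picture_buffer|, by whatever mechanism the strategy implements.
  virtual void AssignCurrentSurfaceToPictureBuffer(
      int32 codec_buffer_index,
      const media::PictureBuffer& picture_buffer) = 0;

  // Release strategy-owned resources before the decoder is destroyed.
  virtual void Cleanup() = 0;

 protected:
  friend class base::RefCounted<BackingStrategy>;
  virtual ~BackingStrategy() {}
};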