Index: content/common/gpu/media/android_video_decode_accelerator.cc |
diff --git a/content/common/gpu/media/android_video_decode_accelerator.cc b/content/common/gpu/media/android_video_decode_accelerator.cc |
new file mode 100644 |
index 0000000000000000000000000000000000000000..019ddd78f1fb1c5010053ba952965150fbba79d7 |
--- /dev/null |
+++ b/content/common/gpu/media/android_video_decode_accelerator.cc |
@@ -0,0 +1,357 @@ |
+// Copyright (c) 2013 The Chromium Authors. All rights reserved. |
+// Use of this source code is governed by a BSD-style license that can be |
+// found in the LICENSE file. |
+ |
+#include "content/common/gpu/media/android_video_decode_accelerator.h" |
+ |
+#include <jni.h> |
+ |
+#include "base/android/jni_android.h" |
+#include "base/android/scoped_java_ref.h" |
+#include "base/bind.h" |
+#include "base/debug/trace_event.h" |
+#include "base/logging.h" |
+#include "base/stl_util.h" |
+#include "base/string_util.h" |
+#include "content/common/android/surface_callback.h" |
+#include "content/common/gpu/gpu_channel.h" |
+#include "content/common/gpu/media/gles2_external_texture_copier.h" |
+#include "media/base/android/media_codec_bridge.h" |
+#include "media/base/bitstream_buffer.h" |
+#include "media/video/picture.h" |
+#include "third_party/angle/include/GLES2/gl2.h" |
+#include "third_party/angle/include/GLES2/gl2ext.h" |
+ |
+using base::android::MethodID; |
+using base::android::ScopedJavaLocalRef; |
+ |
+namespace content { |
+ |
+#define LOG_LINE() VLOG(1) << __FUNCTION__ |
+ |
+// Number of PictureBuffers requested from the client once the output |
+// format is known. |
+enum { kNumPictureBuffers = 4 }; |
+// Delay between DoDecode() polling passes, in milliseconds. |
+enum { kDecodePollDelayMs = 10 }; |
+ |
+// Timeout for MediaCodec.dequeueInputBuffer(), in microseconds.  Kept |
+// short so a polling pass never blocks the message loop for long; |
+// presumably tuned empirically -- TODO confirm. |
+enum { kDequeueInputBufferTimeOutUs = 10 }; |
ycheo (away)
2013/01/17 08:34:44
Could you leave some comment on why you choose this value?
dwkang1
2013/01/18 07:14:08
Done.
|
+enum { kDequeueOutputBufferTimeOutUs = 10 }; |
+ |
+// Constructs the decoder on the current message loop.  |client| receives |
+// all VDA callbacks; |make_context_current| must make this decoder's GL |
+// context current before any GL work is done. |
+AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator( |
+    media::VideoDecodeAccelerator::Client* client, |
+    const base::Callback<bool(void)>& make_context_current) |
+    : message_loop_(MessageLoop::current()), |
+      client_(client), |
+      make_context_current_(make_context_current), |
+      codec_(UNKNOWN), |
+      surface_texture_id_(0), |
+      picturebuffer_requested_(false), |
+      color_format_(0), |
+      width_(0), |
+      height_(0), |
+      current_bitstream_id_(-1) { |
+  LOG_LINE(); |
+} |
+ |
+// Must be destroyed on the same message loop the decoder was created on. |
+AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() { |
+  LOG_LINE(); |
+  DCHECK_EQ(message_loop_, MessageLoop::current()); |
+} |
+ |
+// Maps |profile| onto a codec type, performs one-time GL and MediaCodec |
+// setup, and starts the DoDecode() polling loop.  Returns false if the |
+// profile is unsupported or the GL context cannot be made current. |
+bool AndroidVideoDecodeAccelerator::Initialize( |
+    media::VideoCodecProfile profile) { |
+  LOG_LINE(); |
+  DCHECK_EQ(message_loop_, MessageLoop::current()); |
+ |
+  if (profile == media::VP8PROFILE_MAIN) { |
+    codec_ = VP8; |
+  } else if (profile >= media::H264PROFILE_MIN && |
+             profile <= media::H264PROFILE_MAX) { |
+    codec_ = H264; |
+  } else { |
+    LOG(ERROR) << "Unsupported profile: " << profile; |
+    return false; |
+  } |
+ |
+  if (media_codec_ == NULL) { |
+    if (!make_context_current_.Run()) { |
+      LOG(ERROR) << "Failed to make this decoder's GL context current."; |
+      return false; |
+    } |
+    // Create the external texture that the SurfaceTexture will render |
+    // decoded frames into. |
+    glGenTextures(1, &surface_texture_id_); |
+    glActiveTexture(GL_TEXTURE0); |
+    glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_); |
+ |
+    glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST); |
+    glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST); |
+    glTexParameteri(GL_TEXTURE_EXTERNAL_OES, |
+                    GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); |
+    glTexParameteri(GL_TEXTURE_EXTERNAL_OES, |
+                    GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); |
+ |
+    surface_texture_ = new SurfaceTextureBridge(surface_texture_id_); |
+ |
+    ConfigureMediaCodec(); |
+  } |
+ |
+  // Kick off the decode loop; DoDecode() re-posts itself afterwards. |
+  message_loop_->PostTask( |
+      FROM_HERE, |
+      base::Bind( |
+          &AndroidVideoDecodeAccelerator::DoDecode, base::Unretained(this))); |
+ |
+  if (client_) |
+    client_->NotifyInitializeDone(); |
+  return true; |
+} |
+ |
+// One polling pass: feed any pending input, drain any available output, |
+// then re-post itself after kDecodePollDelayMs. |
+void AndroidVideoDecodeAccelerator::DoDecode() { |
+  QueueInput(); |
+  DequeueOutput(); |
+ |
+  message_loop_->PostDelayedTask( |
+      FROM_HERE, |
+      base::Bind( |
+          &AndroidVideoDecodeAccelerator::DoDecode, base::Unretained(this)), |
+      base::TimeDelta::FromMilliseconds(kDecodePollDelayMs)); |
ycheo (away)
2013/01/17 08:34:44
Why don't you make the whole TimeDelta as a consta
dwkang1
2013/01/18 07:14:08
Done.
 |
+} |
+ |
+// Moves at most one pending bitstream buffer into a free MediaCodec input |
+// buffer.  The bitstream buffer ID is propagated through MediaCodec's |
+// presentation-timestamp field so DequeueOutput() can match output frames |
+// back to their source buffer. |
+void AndroidVideoDecodeAccelerator::QueueInput() { |
+  if (!pending_bitstream_buffers_.empty()) { |
+    int input_buf_index = |
+        media_codec_->DequeueInputBuffer(kDequeueInputBufferTimeOutUs); |
+    if (input_buf_index < 0) { |
+      // No free input buffer yet; retry on the next DoDecode() pass. |
+      return; |
+    } |
+    media::BitstreamBuffer& bitstream_buffer = |
+        pending_bitstream_buffers_.front(); |
+    pending_bitstream_buffers_.pop(); |
+ |
+    int flags = 0; |
+    if (bitstream_buffer.id() == -1) { |
+      // ID -1 is the sentinel queued by Flush() to mark end of stream. |
+      flags |= 4; // BUFFER_FLAG_END_OF_STREAM |
+    } |
+    if (bitstream_buffer.size() > 0) { |
+      scoped_ptr<base::SharedMemory> shm( |
+          new base::SharedMemory(bitstream_buffer.handle(), true)); |
+      if (!shm->Map(bitstream_buffer.size())) { |
+        LOG(ERROR) << "Failed to SharedMemory::Map()"; |
+        if (bitstream_buffer.id() != -1) { |
+          client_->NotifyEndOfBitstreamBuffer(bitstream_buffer.id()); |
+        } |
+        // NOTE(review): the dequeued |input_buf_index| is not returned to |
+        // MediaCodec on this path; per the review discussion below this |
+        // mirrors other VDA implementations -- confirm acceptable. |
+        return; |
ycheo (away)
2013/01/17 08:34:44
What can we do for the dequeued input_buf_index?
dwkang1
2013/01/18 07:14:08
Not much. I've noticed that other vda implementati
 |
+      } |
+      media_codec_->PutToInputBuffer( |
+          input_buf_index, |
+          static_cast<const uint8*>(shm->memory()), |
+          bitstream_buffer.size()); |
+    } |
+    // Abuse the presentation time argument to propagate the bitstream |
+    // buffer ID to the output, so we can report it back to the client in |
+    // PictureReady(). |
+    int64 timestamp = bitstream_buffer.id(); |
+    media_codec_->QueueInputBuffer( |
+        input_buf_index, 0, bitstream_buffer.size(), timestamp, flags); |
+ |
+    if (bitstream_buffer.id() != -1) { |
+      client_->NotifyEndOfBitstreamBuffer(bitstream_buffer.id()); |
+    } |
+  } |
+} |
+ |
+// Drains MediaCodec output: handles the INFO_* status codes from |
+// dequeueOutputBuffer, then renders the dequeued buffer to the surface |
+// texture and forwards it to the client.  Fixes: the third out-param was |
+// garbled by an HTML-entity mangling ("&times;tamp" for "&timestamp"); |
+// unreachable break-after-return statements removed. |
+void AndroidVideoDecodeAccelerator::DequeueOutput() { |
+  if (picturebuffer_requested_ && picture_map_.empty()) { |
+    DLOG(INFO) << "Picture buffers are not ready."; |
+    return; |
+  } |
+  if (!picture_map_.empty() && free_picture_ids_.empty()) { |
+    // Don't have any picture buffer to send. Need to wait more. |
+    return; |
+  } |
+ |
+  int32 output_offset = 0; |
+  int32 output_size = 0; |
+  int32 output_flag = 0; |
+  int64 timestamp = 0; |
+  int32 output_buf_index = 0; |
+  do { |
+    output_buf_index = media_codec_->DequeueOutputBuffer( |
+        kDequeueOutputBufferTimeOutUs, &output_offset, &output_size, |
+        &timestamp, &output_flag); |
+    switch (output_buf_index) { |
+      case -1: // INFO_TRY_AGAIN_LATER |
ycheo (away)
2013/01/17 08:34:44
Sorry, this should be my work. please define these
dwkang1
2013/01/18 07:14:08
Done.
 |
+        return; |
ycheo (away)
2013/01/17 08:34:44
redundant?
dwkang1
2013/01/18 07:14:08
Removed.
 |
+ |
+      case -2: // INFO_OUTPUT_FORMAT_CHANGED |
+        media_codec_->GetOutputFormat(&color_format_, &width_, &height_); |
+        DLOG(INFO) << "Output color format: " << color_format_; |
+        DLOG(INFO) << "Output size: " << width_ << "x" << height_; |
+        if (!picturebuffer_requested_) { |
+          picturebuffer_requested_ = true; |
+          texture_copier_.reset(new Gles2ExternalTextureCopier()); |
+          texture_copier_->Init(width_, height_); |
+          client_->ProvidePictureBuffers( |
+              kNumPictureBuffers, |
+              gfx::Size(width_, height_), |
+              GL_TEXTURE_2D); |
+        } |
+        // TODO(dwkang): support the dynamic resolution change. |
+        return; |
+ |
+      case -3: // INFO_OUTPUT_BUFFERS_CHANGED |
+        media_codec_->GetOutputBuffers(); |
+        break; |
+    } |
+  } while (output_buf_index < 0); |
+ |
+  if (output_flag & 4) { // BUFFER_FLAG_END_OF_STREAM |
+    if (client_) { |
+      client_->NotifyFlushDone(); |
+    } |
+  } |
+ |
+  // Render to the surface texture; the bitstream ID rides back in the |
+  // timestamp (see QueueInput()). |
+  media_codec_->ReleaseOutputBuffer(output_buf_index, true); |
+  current_bitstream_id_ = static_cast<int32>(timestamp); |
+  if (current_bitstream_id_ != -1) { |
+    SendCurrentSurfaceToClient(); |
+  } |
+} |
+ |
+// Copies the newest decoded frame from the surface texture into the next |
+// free picture buffer and reports it via PictureReady(). |
+void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient() { |
+  LOG_LINE(); |
+ |
+  DCHECK_EQ(message_loop_, MessageLoop::current()); |
+  DCHECK_NE(current_bitstream_id_, -1); |
+  DCHECK(!free_picture_ids_.empty()); |
+ |
+  int32 picture_buffer_id = free_picture_ids_.front(); |
+  free_picture_ids_.pop(); |
+ |
+  if (!make_context_current_.Run()) { |
+    LOG(ERROR) << "Failed to make this decoder's GL context current."; |
+    // NOTE(review): |picture_buffer_id| was already popped and is lost on |
+    // this path, and current_bitstream_id_ is not reset -- confirm this is |
+    // intended. |
+    return; |
+  } |
+ |
+  // Latch the most recent frame into the external texture and fetch its |
+  // transform matrix before copying. |
+  float mtx[16]; |
+  surface_texture_->UpdateTexImage(); |
+  surface_texture_->GetTransformMatrix(mtx); |
+  CopyCurrentFrameToPictureBuffer(picture_buffer_id, mtx); |
+ |
+  client_->PictureReady( |
+      media::Picture(picture_buffer_id, current_bitstream_id_)); |
+  current_bitstream_id_ = -1; |
+} |
+ |
+// Copies the frame currently latched in |surface_texture_id_| into the |
+// texture backing |picture_buffer_id|, applying |transform_matrix|. |
+// Fixes: "PuctureBuffer" typo in the error log; "transfrom_matrix" |
+// parameter-name typo (definition-local, so callers are unaffected). |
+void AndroidVideoDecodeAccelerator::CopyCurrentFrameToPictureBuffer( |
+    int32 picture_buffer_id, float transform_matrix[16]) { |
+  PictureMap::const_iterator i = picture_map_.find(picture_buffer_id); |
+  if (i == picture_map_.end()) { |
+    LOG(ERROR) << "Can't find a PictureBuffer for " << picture_buffer_id; |
+    return; |
+  } |
+  uint32 picture_buffer_texture_id = i->second.texture_id(); |
+  texture_copier_->Copy(surface_texture_id_, GL_TEXTURE_EXTERNAL_OES, |
+                        transform_matrix, |
+                        picture_buffer_texture_id, GL_TEXTURE_2D); |
+} |
+ |
+// Queues |bitstream_buffer| for QueueInput() to feed into MediaCodec on a |
+// later DoDecode() pass.  Buffers arriving without a client are dropped. |
+void AndroidVideoDecodeAccelerator::Decode( |
+    const media::BitstreamBuffer& bitstream_buffer) { |
+  LOG_LINE(); |
+  DCHECK_EQ(message_loop_, MessageLoop::current()); |
+  if (client_) { |
+    pending_bitstream_buffers_.push(bitstream_buffer); |
+  } |
+} |
+ |
+// Records the client-allocated picture buffers and marks each one as free |
+// for DequeueOutput() to fill. |
+void AndroidVideoDecodeAccelerator::AssignPictureBuffers( |
+    const std::vector<media::PictureBuffer>& buffers) { |
+  LOG_LINE(); |
+  DCHECK_EQ(message_loop_, MessageLoop::current()); |
+  DCHECK(picture_map_.empty()); |
+ |
+  std::vector<media::PictureBuffer>::const_iterator it; |
+  for (it = buffers.begin(); it != buffers.end(); ++it) { |
+    picture_map_.insert(std::make_pair(it->id(), *it)); |
+    free_picture_ids_.push(it->id()); |
+  } |
+} |
+ |
+// Returns |picture_buffer_id| to the free pool once the client has |
+// finished rendering it. |
+void AndroidVideoDecodeAccelerator::ReusePictureBuffer( |
+    int32 picture_buffer_id) { |
+  LOG_LINE(); |
+  DCHECK_EQ(message_loop_, MessageLoop::current()); |
+  free_picture_ids_.push(picture_buffer_id); |
+} |
+ |
+// Queues an empty buffer with the sentinel ID -1; QueueInput() translates |
+// it into BUFFER_FLAG_END_OF_STREAM, and DequeueOutput() reports |
+// NotifyFlushDone() when the flag comes back out of the codec. |
+void AndroidVideoDecodeAccelerator::Flush() { |
+  LOG_LINE(); |
+  DCHECK_EQ(message_loop_, MessageLoop::current()); |
+ |
+  Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); |
+} |
+ |
+// Creates and starts a MediaCodec for the selected codec, rendering into a |
+// Java Surface wrapped around |surface_texture_| (constructed via JNI |
+// because there is no C++ binding for android.view.Surface here). |
+void AndroidVideoDecodeAccelerator::ConfigureMediaCodec() { |
+  DCHECK(surface_texture_.get()); |
+  DCHECK(codec_ == H264 || codec_ == VP8); |
+ |
+  std::string mime; |
+  if (codec_ == VP8) { |
+    mime = "video/x-vnd.on2.vp8"; |
+  } else if (codec_ == H264) { |
+    mime = "video/avc"; |
+  } else { |
+    LOG(ERROR) << "Unsupported codec type " << codec_; |
+    NOTREACHED(); |
+  } |
+  media_codec_.reset(new media::MediaCodecBridge(mime)); |
+ |
+  // Build new Surface(surfaceTexture) through JNI. |
+  JNIEnv* env = base::android::AttachCurrentThread(); |
+  CHECK(env); |
+  ScopedJavaLocalRef<jclass> cls( |
+      base::android::GetClass(env, "android/view/Surface")); |
+  jmethodID constructor = MethodID::Get<MethodID::TYPE_INSTANCE>( |
+      env, cls.obj(), "<init>", "(Landroid/graphics/SurfaceTexture;)V"); |
+  ScopedJavaLocalRef<jobject> j_surface( |
+      env, env->NewObject( |
+          cls.obj(), constructor, |
+          surface_texture_->j_surface_texture().obj())); |
+ |
+  // VDA does not pass the container indicated resolution in the initialization |
+  // phase. Here, we set 1080p by default. |
+  media_codec_->ConfigureVideo( |
+      mime, 1920, 1080, NULL, 0, NULL, 0, j_surface.obj()); |
+  // The codec holds its own reference after configure, so the local |
+  // Surface can be released immediately. |
+  content::ReleaseSurface(j_surface.obj()); |
+ |
+  media_codec_->Start(); |
+  media_codec_->GetInputBuffers(); |
+  media_codec_->GetOutputBuffers(); |
+} |
+ |
+// Drops all pending input (notifying the client for each real buffer, |
+// skipping the -1 flush sentinel), restarts MediaCodec, and reports |
+// NotifyResetDone().  Fixes "while(" spacing to match style elsewhere in |
+// the file. |
+void AndroidVideoDecodeAccelerator::Reset() { |
+  LOG_LINE(); |
+  DCHECK_EQ(message_loop_, MessageLoop::current()); |
+ |
+  while (!pending_bitstream_buffers_.empty()) { |
+    media::BitstreamBuffer& bitstream_buffer = |
+        pending_bitstream_buffers_.front(); |
+    pending_bitstream_buffers_.pop(); |
+ |
+    if (bitstream_buffer.id() != -1) { |
+      client_->NotifyEndOfBitstreamBuffer(bitstream_buffer.id()); |
+    } |
+  } |
+  media_codec_->Flush(); |
+  media_codec_->Stop(); |
ycheo (away)
2013/01/17 08:34:44
Why not Release()?
dwkang1
2013/01/18 07:14:08
Done.
 |
+  ConfigureMediaCodec(); |
+ |
+  if (client_) { |
+    client_->NotifyResetDone(); |
+  } |
+} |
+ |
+// NOTE(review): unlike the other teardown paths, this releases nothing -- |
+// no codec stop, no GL texture deletion, no |delete this| -- confirm |
+// teardown is handled elsewhere or still to be implemented. |
+void AndroidVideoDecodeAccelerator::Destroy() { |
+  LOG_LINE(); |
+  DCHECK_EQ(message_loop_, MessageLoop::current()); |
+} |
+ |
+} // namespace content |