Index: media/gpu/android_video_encode_accelerator.cc
diff --git a/content/common/gpu/media/android_video_encode_accelerator.cc b/media/gpu/android_video_encode_accelerator.cc
similarity index 85%
rename from content/common/gpu/media/android_video_encode_accelerator.cc
rename to media/gpu/android_video_encode_accelerator.cc
index accaa50145a13e1bf215c37b370aad1a3ca3813d..5b43ff5325c703597301832e1ccb5dedf426b31a 100644
--- a/content/common/gpu/media/android_video_encode_accelerator.cc
+++ b/media/gpu/android_video_encode_accelerator.cc
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include "content/common/gpu/media/android_video_encode_accelerator.h"
+#include "media/gpu/android_video_encode_accelerator.h"
 
 #include <set>
 
@@ -10,12 +10,12 @@
 #include "base/logging.h"
 #include "base/message_loop/message_loop.h"
 #include "base/metrics/histogram.h"
-#include "content/common/gpu/media/shared_memory_region.h"
 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
 #include "gpu/ipc/service/gpu_channel.h"
 #include "media/base/android/media_codec_util.h"
 #include "media/base/bitstream_buffer.h"
 #include "media/base/limits.h"
+#include "media/gpu/shared_memory_region.h"
 #include "media/video/picture.h"
 #include "third_party/libyuv/include/libyuv/convert_from.h"
 #include "ui/gl/android/scoped_java_surface.h"
@@ -24,7 +24,7 @@
 using media::VideoCodecBridge;
 using media::VideoFrame;
 
-namespace content {
+namespace media {
 
 // Limit default max video codec size for Android to avoid
 // HW codec initialization failure for resolution higher than 720p.
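Note: the 720p cap described in this comment is enforced through hard-coded constants consumed further down in GetSupportedProfiles(). Their definitions are not visible in this hunk; a plausible definition consistent with the comment (values assumed, not taken from the patch) would be:

    // Assumed values matching the 720p limit described above.
    enum {
      kMaxEncodeFrameWidth = 1280,
      kMaxEncodeFrameHeight = 720,
      kMaxFramerateNumerator = 30,
      kMaxFramerateDenominator = 1,
    };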
@@ -93,8 +93,7 @@ static bool GetSupportedColorFormatForMime(const std::string& mime,
 }
 
 AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator()
-    : num_buffers_at_codec_(0),
-      last_set_bitrate_(0) {}
+    : num_buffers_at_codec_(0), last_set_bitrate_(0) {}
 
 AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() {
   DCHECK(thread_checker_.CalledOnValidThread());
@@ -105,13 +104,11 @@ AndroidVideoEncodeAccelerator::GetSupportedProfiles() {
   SupportedProfiles profiles;
 
   const struct {
-    const media::VideoCodec codec;
-    const media::VideoCodecProfile profile;
-  } kSupportedCodecs[] = {
-    { media::kCodecVP8, media::VP8PROFILE_ANY },
-    { media::kCodecH264, media::H264PROFILE_BASELINE },
-    { media::kCodecH264, media::H264PROFILE_MAIN }
-  };
+    const media::VideoCodec codec;
+    const media::VideoCodecProfile profile;
+  } kSupportedCodecs[] = {{media::kCodecVP8, media::VP8PROFILE_ANY},
+                          {media::kCodecH264, media::H264PROFILE_BASELINE},
+                          {media::kCodecH264, media::H264PROFILE_MAIN}};
 
   for (const auto& supported_codec : kSupportedCodecs) {
     if (supported_codec.codec == media::kCodecVP8 &&
@@ -128,8 +125,7 @@ AndroidVideoEncodeAccelerator::GetSupportedProfiles() {
     profile.profile = supported_codec.profile;
     // It would be nice if MediaCodec exposes the maximum capabilities of
     // the encoder. Hard-code some reasonable defaults as workaround.
-    profile.max_resolution.SetSize(kMaxEncodeFrameWidth,
-                                   kMaxEncodeFrameHeight);
+    profile.max_resolution.SetSize(kMaxEncodeFrameWidth, kMaxEncodeFrameHeight);
     profile.max_framerate_numerator = kMaxFramerateNumerator;
     profile.max_framerate_denominator = kMaxFramerateDenominator;
     profiles.push_back(profile);
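Note: callers see these hard-coded limits reflected in each returned SupportedProfile. Hypothetical client code (illustrative only, not part of this patch):

    for (const auto& p : encoder->GetSupportedProfiles()) {
      if (p.profile == media::H264PROFILE_BASELINE &&
          p.max_resolution.width() >= 1280 &&
          p.max_resolution.height() >= 720) {
        // 720p H.264 baseline hardware encoding is available.
      }
    }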
@@ -192,12 +188,9 @@ bool AndroidVideoEncodeAccelerator::Initialize(
     DLOG(ERROR) << "No color format support.";
     return false;
   }
-  media_codec_.reset(media::VideoCodecBridge::CreateEncoder(codec,
-                                                            input_visible_size,
-                                                            initial_bitrate,
-                                                            INITIAL_FRAMERATE,
-                                                            IFRAME_INTERVAL,
-                                                            pixel_format));
+  media_codec_.reset(media::VideoCodecBridge::CreateEncoder(
+      codec, input_visible_size, initial_bitrate, INITIAL_FRAMERATE,
+      IFRAME_INTERVAL, pixel_format));
 
   if (!media_codec_) {
     DLOG(ERROR) << "Failed to create/start the codec: "
@@ -211,19 +204,15 @@ bool AndroidVideoEncodeAccelerator::Initialize(
   base::MessageLoop::current()->PostTask(
       FROM_HERE,
       base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers,
-                 client_ptr_factory_->GetWeakPtr(),
-                 frame_input_count,
-                 input_visible_size,
-                 output_buffer_capacity));
+                 client_ptr_factory_->GetWeakPtr(), frame_input_count,
+                 input_visible_size, output_buffer_capacity));
   return true;
 }
 
 void AndroidVideoEncodeAccelerator::MaybeStartIOTimer() {
   if (!io_timer_.IsRunning() &&
       (num_buffers_at_codec_ > 0 || !pending_frames_.empty())) {
-    io_timer_.Start(FROM_HERE,
-                    EncodePollDelay(),
-                    this,
+    io_timer_.Start(FROM_HERE, EncodePollDelay(), this,
                     &AndroidVideoEncodeAccelerator::DoIOTask);
   }
 }
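Note: MaybeStartIOTimer() is the poll-while-busy half of a manual polling scheme. With no completion callbacks from the codec, the accelerator runs DoIOTask() on a repeating timer whenever buffers are held by the codec or input frames are pending, and stops the timer once the encoder goes idle. A minimal self-contained sketch of the idiom (not the patch's code; the 10 ms period stands in for EncodePollDelay()):

    #include "base/location.h"
    #include "base/timer/timer.h"

    class EncoderPoller {
     public:
      // Call whenever new work is queued.
      void MaybeStartIOTimer() {
        if (!io_timer_.IsRunning() && HasPendingWork()) {
          io_timer_.Start(FROM_HERE, base::TimeDelta::FromMilliseconds(10),
                          this, &EncoderPoller::DoIOTask);
        }
      }

     private:
      void DoIOTask() {
        // Feed pending input and drain output here; stop polling once idle.
        if (!HasPendingWork())
          io_timer_.Stop();
      }
      bool HasPendingWork() const { return false; }  // Placeholder bookkeeping.
      base::RepeatingTimer io_timer_;
    };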
@@ -314,8 +303,7 @@ void AndroidVideoEncodeAccelerator::QueueInput() {
   if (status != media::MEDIA_CODEC_OK) {
     DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
            status == media::MEDIA_CODEC_ERROR);
-    RETURN_ON_FAILURE(status != media::MEDIA_CODEC_ERROR,
-                      "MediaCodec error",
+    RETURN_ON_FAILURE(status != media::MEDIA_CODEC_ERROR, "MediaCodec error",
                       kPlatformFailureError);
     return;
   }
@@ -351,24 +339,18 @@ void AndroidVideoEncodeAccelerator::QueueInput() {
   int dst_stride_uv = frame->stride(VideoFrame::kUPlane) * 2;
   // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR. See comment at other
   // mention of that constant.
-  bool converted = !libyuv::I420ToNV12(frame->data(VideoFrame::kYPlane),
-                                       frame->stride(VideoFrame::kYPlane),
-                                       frame->data(VideoFrame::kUPlane),
-                                       frame->stride(VideoFrame::kUPlane),
-                                       frame->data(VideoFrame::kVPlane),
-                                       frame->stride(VideoFrame::kVPlane),
-                                       dst_y,
-                                       dst_stride_y,
-                                       dst_uv,
-                                       dst_stride_uv,
-                                       frame->coded_size().width(),
-                                       frame->coded_size().height());
+  bool converted = !libyuv::I420ToNV12(
+      frame->data(VideoFrame::kYPlane), frame->stride(VideoFrame::kYPlane),
+      frame->data(VideoFrame::kUPlane), frame->stride(VideoFrame::kUPlane),
+      frame->data(VideoFrame::kVPlane), frame->stride(VideoFrame::kVPlane),
+      dst_y, dst_stride_y, dst_uv, dst_stride_uv, frame->coded_size().width(),
+      frame->coded_size().height());
   RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError);
 
   fake_input_timestamp_ += base::TimeDelta::FromMicroseconds(1);
-  status = media_codec_->QueueInputBuffer(
-      input_buf_index, NULL, queued_size, fake_input_timestamp_);
-  UMA_HISTOGRAM_TIMES("Media.AVEA.InputQueueTime",
+  status = media_codec_->QueueInputBuffer(input_buf_index, NULL, queued_size,
+                                          fake_input_timestamp_);
+ UMA_HISTOGRAM_TIMES("Media.AVDA.InputQueueTime", |
                       base::Time::Now() - base::get<2>(input));
   RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                     "Failed to QueueInputBuffer: " << status,
@@ -433,14 +415,11 @@ void AndroidVideoEncodeAccelerator::DequeueOutput() {
   media_codec_->ReleaseOutputBuffer(buf_index, false);
   --num_buffers_at_codec_;
 
-  UMA_HISTOGRAM_COUNTS_10000("Media.AVEA.EncodedBufferSizeKB", size / 1024);
   base::MessageLoop::current()->PostTask(
       FROM_HERE,
       base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady,
-                 client_ptr_factory_->GetWeakPtr(),
-                 bitstream_buffer.id(),
-                 size,
+                 client_ptr_factory_->GetWeakPtr(), bitstream_buffer.id(), size,
                  key_frame));
 }
 
-}  // namespace content
+}  // namespace media
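Note on downstream impact: after this change, callers that previously included the content/common/gpu/media header and used the content namespace switch to the media/gpu path and the media namespace. Illustrative caller code (assumed, not part of this patch):

    #include <memory>

    #include "base/memory/ptr_util.h"
    #include "media/gpu/android_video_encode_accelerator.h"

    std::unique_ptr<media::VideoEncodeAccelerator> CreateAndroidVEA() {
      return base::WrapUnique(new media::AndroidVideoEncodeAccelerator());
    }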