Index: media/gpu/android_video_encode_accelerator.cc |
diff --git a/media/gpu/android_video_encode_accelerator.cc b/media/gpu/android_video_encode_accelerator.cc |
index 023d2351a1ee4047b0b008d784b114ce887b9d07..9dadc2ad1a4b41cae7de449f65a6c7d0dbcee3af 100644 |
--- a/media/gpu/android_video_encode_accelerator.cc |
+++ b/media/gpu/android_video_encode_accelerator.cc |
@@ -35,6 +35,8 @@ const int kMaxEncodeFrameHeight = 720; |
const int kMaxFramerateNumerator = 30; |
const int kMaxFramerateDenominator = 1; |
+const int64_t kNumMicrosecsPerSec = 1000000; |
+ |
enum PixelFormat { |
// Subset of MediaCodecInfo.CodecCapabilities. |
COLOR_FORMAT_YUV420_PLANAR = 19, |
@@ -108,8 +110,7 @@ AndroidVideoEncodeAccelerator::GetSupportedProfiles() { |
const VideoCodec codec; |
const VideoCodecProfile profile; |
} kSupportedCodecs[] = {{kCodecVP8, VP8PROFILE_ANY}, |
- {kCodecH264, H264PROFILE_BASELINE}, |
- {kCodecH264, H264PROFILE_MAIN}}; |
+ {kCodecH264, H264PROFILE_BASELINE}}; |
for (const auto& supported_codec : kSupportedCodecs) { |
if (supported_codec.codec == kCodecVP8 && |
@@ -117,6 +118,11 @@ AndroidVideoEncodeAccelerator::GetSupportedProfiles() { |
continue; |
} |
+ if (supported_codec.codec == kCodecH264 && |
+ !MediaCodecUtil::IsH264EncoderAvailable()) { |
+ continue; |
+ } |
+ |
if (VideoCodecBridge::IsKnownUnaccelerated(supported_codec.codec, |
MEDIA_CODEC_ENCODER)) { |
continue; |
@@ -161,15 +167,18 @@ bool AndroidVideoEncodeAccelerator::Initialize( |
// encoder before being returned any output frames, since the encoder may |
// need to hold onto some subset of inputs as reference pictures. |
uint32_t frame_input_count; |
+ uint32_t i_frame_interval; |
if (output_profile == VP8PROFILE_ANY) { |
codec = kCodecVP8; |
mime_type = "video/x-vnd.on2.vp8"; |
frame_input_count = 1; |
+ i_frame_interval = IFRAME_INTERVAL_VPX; |
} else if (output_profile == H264PROFILE_BASELINE || |
output_profile == H264PROFILE_MAIN) { |
codec = kCodecH264; |
mime_type = "video/avc"; |
frame_input_count = 30; |
+ i_frame_interval = IFRAME_INTERVAL_H264; |
} else { |
return false; |
} |
@@ -190,7 +199,7 @@ bool AndroidVideoEncodeAccelerator::Initialize( |
} |
media_codec_.reset(VideoCodecBridge::CreateEncoder( |
codec, input_visible_size, initial_bitrate, INITIAL_FRAMERATE, |
- IFRAME_INTERVAL, pixel_format)); |
+ i_frame_interval, pixel_format)); |
if (!media_codec_) { |
DLOG(ERROR) << "Failed to create/start the codec: " |
@@ -347,9 +356,10 @@ void AndroidVideoEncodeAccelerator::QueueInput() { |
frame->coded_size().height()); |
RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError); |
- fake_input_timestamp_ += base::TimeDelta::FromMicroseconds(1); |
+ input_timestamp_ += base::TimeDelta::FromMicroseconds(kNumMicrosecsPerSec / |
[review comment] DaleCurtis (2016/09/29 19:34:14): base::Time:: has this already for you.
[review reply] braveyao (2016/09/30 18:22:02): Done.
|
+ INITIAL_FRAMERATE); |
status = media_codec_->QueueInputBuffer(input_buf_index, nullptr, queued_size, |
- fake_input_timestamp_); |
+ input_timestamp_); |
UMA_HISTOGRAM_TIMES("Media.AVDA.InputQueueTime", |
base::Time::Now() - std::get<2>(input)); |
RETURN_ON_FAILURE(status == MEDIA_CODEC_OK, |
@@ -383,10 +393,10 @@ void AndroidVideoEncodeAccelerator::DequeueOutput() { |
return; |
case MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: |
- break; |
+ return; |
case MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED: |
- break; |
+ return; |
case MEDIA_CODEC_OK: |
DCHECK_GE(buf_index, 0); |