Index: content/common/gpu/media/android_video_encode_accelerator.cc
diff --git a/content/common/gpu/media/android_video_encode_accelerator.cc b/content/common/gpu/media/android_video_encode_accelerator.cc
index eb383081d7fef5cf535899c6c03dfc7b3070d4cc..7e15ad30b24d6d6f08d99d168849170427817457 100644
--- a/content/common/gpu/media/android_video_encode_accelerator.cc
+++ b/content/common/gpu/media/android_video_encode_accelerator.cc
@@ -95,8 +95,6 @@ static bool GetSupportedColorFormatForMime(const std::string& mime,

 AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator()
     : num_buffers_at_codec_(0),
-      num_output_buffers_(-1),
-      output_buffers_capacity_(0),
       last_set_bitrate_(0) {}

 AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() {
@@ -164,17 +162,24 @@ bool AndroidVideoEncodeAccelerator::Initialize(
   std::string mime_type;
   media::VideoCodec codec;
+  // The client should be prepared to feed at least this many frames into the
+  // encoder before being returned any output frames, since the encoder may
+  // need to hold onto some subset of inputs as reference pictures.
+  unsigned int frame_input_count;
DaleCurtis, 2016/02/10 17:52:48:
  We don't use unsigned in Chromium code, this shoul…

magjed_chromium, 2016/02/10 20:25:12:
  Done, changed to uint32_t. The reason I used 'unsi…
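Based on the reply above ("Done, changed to uint32_t"), a minimal sketch of how the declaration presumably reads after that change; the comment is the one from the patch, and <stdint.h> (or its Chromium equivalent) is assumed to already be pulled in:

    #include <stdint.h>

    // The client should be prepared to feed at least this many frames into the
    // encoder before being returned any output frames, since the encoder may
    // need to hold onto some subset of inputs as reference pictures.
    uint32_t frame_input_count;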
   if (output_profile == media::VP8PROFILE_ANY) {
     codec = media::kCodecVP8;
     mime_type = "video/x-vnd.on2.vp8";
+    frame_input_count = 1;
   } else if (output_profile == media::H264PROFILE_BASELINE ||
              output_profile == media::H264PROFILE_MAIN) {
     codec = media::kCodecH264;
     mime_type = "video/avc";
+    frame_input_count = 30;
   } else {
     return false;
   }
+  frame_size_ = input_visible_size;

   last_set_bitrate_ = initial_bitrate;

   // Only consider using MediaCodec if it's likely backed by hardware.
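The frame_input_count chosen above (1 for VP8, 30 for H.264) is what Initialize() later reports to the client via RequireBitstreamBuffers(), together with the output buffer capacity computed below. As a rough illustration of how a client might act on those values, here is a hedged sketch; ExampleEncoderClient and all of its members are invented for illustration and are not part of this patch or of the real VideoEncodeAccelerator::Client interface:

    #include <stddef.h>
    #include <stdint.h>

    // Hypothetical client-side bookkeeping (illustration only): remember how
    // many input frames may be buffered inside the encoder before the first
    // encoded output appears, and how large each output bitstream buffer
    // should be.
    class ExampleEncoderClient {
     public:
      // Mirrors the shape of the RequireBitstreamBuffers() call made in
      // Initialize(): (input frame count, input size, output buffer capacity).
      void OnRequireBitstreamBuffers(uint32_t frame_input_count,
                                     int input_width,
                                     int input_height,
                                     size_t output_buffer_capacity) {
        // The encoder may hold this many inputs as reference pictures, so the
        // client should not treat "no output yet" as a stall until it has
        // queued at least this many frames.
        min_frames_in_flight_ = frame_input_count;
        // Bitstream buffers handed back via UseOutputBitstreamBuffer() are
        // expected to be at least this large.
        output_buffer_capacity_ = output_buffer_capacity;
      }

      bool MayExpectOutput(uint32_t frames_submitted) const {
        return frames_submitted >= min_frames_in_flight_;
      }

     private:
      uint32_t min_frames_in_flight_ = 0;
      size_t output_buffer_capacity_ = 0;
    };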
@@ -202,15 +207,16 @@ bool AndroidVideoEncodeAccelerator::Initialize(
     return false;
   }

-  num_output_buffers_ = media_codec_->GetOutputBuffersCount();
-  output_buffers_capacity_ = media_codec_->GetOutputBuffersCapacity();
+  // Conservative upper bound for output buffer size: decoded size + 2KB.
+  const size_t output_buffer_capacity =
+      VideoFrame::AllocationSize(format, input_visible_size) + 2048;
   base::MessageLoop::current()->PostTask(
       FROM_HERE,
       base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers,
                  client_ptr_factory_->GetWeakPtr(),
-                 num_output_buffers_,
+                 frame_input_count,
                  input_visible_size,
-                 output_buffers_capacity_));
+                 output_buffer_capacity));

   return true;
 }
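As a worked example of the "decoded size + 2KB" bound, here is a standalone sketch. It assumes VideoFrame::AllocationSize() for PIXEL_FORMAT_I420 comes out to roughly width * height * 3 / 2 bytes (one full-resolution luma plane plus two quarter-resolution chroma planes), ignoring any coded-size alignment the real helper may apply:

    #include <stddef.h>
    #include <stdio.h>

    // Approximate I420 allocation size: Y plane (w * h) plus U and V planes
    // (w/2 * h/2 each), i.e. width * height * 3 / 2 bytes.
    static size_t ApproxI420AllocationSize(size_t width, size_t height) {
      return width * height * 3 / 2;
    }

    int main(void) {
      const size_t width = 640;
      const size_t height = 480;
      // Same shape as the bound above: decoded frame size plus 2KB of slack.
      const size_t output_buffer_capacity =
          ApproxI420AllocationSize(width, height) + 2048;
      // 640 * 480 * 3 / 2 = 460800, so this prints 462848.
      printf("%zu\n", output_buffer_capacity);
      return 0;
    }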
@@ -238,7 +244,8 @@ void AndroidVideoEncodeAccelerator::Encode(
   DCHECK(thread_checker_.CalledOnValidThread());
   RETURN_ON_FAILURE(frame->format() == media::PIXEL_FORMAT_I420,
                     "Unexpected format", kInvalidArgumentError);
-
+  RETURN_ON_FAILURE(frame->visible_rect().size() == frame_size_,
+                    "Unexpected resolution", kInvalidArgumentError);
   // MediaCodec doesn't have a way to specify stride for non-Packed formats, so
   // we insist on being called with packed frames and no cropping :(
   RETURN_ON_FAILURE(frame->row_bytes(VideoFrame::kYPlane) ==
@@ -260,9 +267,6 @@ void AndroidVideoEncodeAccelerator::UseOutputBitstreamBuffer(
     const media::BitstreamBuffer& buffer) {
   DVLOG(3) << __PRETTY_FUNCTION__ << ": bitstream_buffer_id=" << buffer.id();
   DCHECK(thread_checker_.CalledOnValidThread());
-  RETURN_ON_FAILURE(buffer.size() >= media_codec_->GetOutputBuffersCapacity(),
-                    "Output buffers too small!",
-                    kInvalidArgumentError);
   available_bitstream_buffers_.push_back(buffer);
   DoIOTask();
 }
@@ -373,21 +377,6 @@ void AndroidVideoEncodeAccelerator::QueueInput() {
   pending_frames_.pop();
 }

-bool AndroidVideoEncodeAccelerator::DoOutputBuffersSuffice() {
-  // If this returns false ever, then the VEA::Client interface will need to
-  // grow a DismissBitstreamBuffer() call, and VEA::Client impls will have to be
-  // prepared to field multiple requests to RequireBitstreamBuffers().
-  int count = media_codec_->GetOutputBuffersCount();
-  size_t capacity = media_codec_->GetOutputBuffersCapacity();
-  bool ret = count <= num_output_buffers_ &&
-             capacity <= output_buffers_capacity_;
-  LOG_IF(ERROR, !ret) << "Need more/bigger buffers; before: "
-                      << num_output_buffers_ << "x" << output_buffers_capacity_
-                      << ", now: " << count << "x" << capacity;
-  UMA_HISTOGRAM_BOOLEAN("Media.AVEA.OutputBuffersSuffice", ret);
-  return ret;
-}
-
 void AndroidVideoEncodeAccelerator::DequeueOutput() {
   if (!client_ptr_factory_->GetWeakPtr() ||
       available_bitstream_buffers_.empty() || num_buffers_at_codec_ == 0) {
@@ -410,11 +399,19 @@ void AndroidVideoEncodeAccelerator::DequeueOutput() {
       // Unreachable because of previous statement, but included for clarity.
       return;

-    case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED:  // Fall-through.
+    case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED:
+      int width;
+      int height;
+      media_codec_->GetOutputFormat(&width, &height);
liberato (no reviews please), 2016/02/10 18:40:09:
  I don't understand the goal of asking for output s…

magjed_chromium, 2016/02/10 20:25:12:
  It's just a sanity check to make sure the resoluti…
+      RETURN_ON_FAILURE(
+          width == frame_size_.width() && height == frame_size_.height(),
+          "Unexpected resolution change. input: "
+              << frame_size_.width() << "x" << frame_size_.height()
+              << ", output: " << width << "x" << height,
+          kPlatformFailureError);
+      break;
+
     case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
-      RETURN_ON_FAILURE(DoOutputBuffersSuffice(),
-                        "Bitstream now requires more/larger buffers",
-                        kPlatformFailureError);
       break;

     case media::MEDIA_CODEC_OK: