OLD | NEW |
---|---|
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/common/gpu/media/android_video_encode_accelerator.h" | 5 #include "content/common/gpu/media/android_video_encode_accelerator.h" |
6 | 6 |
7 #include <set> | 7 #include <set> |
8 | 8 |
9 #include "base/bind.h" | 9 #include "base/bind.h" |
10 #include "base/command_line.h" | 10 #include "base/command_line.h" |
(...skipping 77 matching lines...) | |
88 else if (formats.count(COLOR_FORMAT_YUV420_PLANAR) > 0) | 88 else if (formats.count(COLOR_FORMAT_YUV420_PLANAR) > 0) |
89 *pixel_format = COLOR_FORMAT_YUV420_PLANAR; | 89 *pixel_format = COLOR_FORMAT_YUV420_PLANAR; |
90 else | 90 else |
91 return false; | 91 return false; |
92 | 92 |
93 return true; | 93 return true; |
94 } | 94 } |
95 | 95 |
96 AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator() | 96 AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator() |
97 : num_buffers_at_codec_(0), | 97 : num_buffers_at_codec_(0), |
98 num_output_buffers_(-1), | |
99 output_buffers_capacity_(0), | |
100 last_set_bitrate_(0) {} | 98 last_set_bitrate_(0) {} |
101 | 99 |
102 AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() { | 100 AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() { |
103 DCHECK(thread_checker_.CalledOnValidThread()); | 101 DCHECK(thread_checker_.CalledOnValidThread()); |
104 } | 102 } |
105 | 103 |
106 media::VideoEncodeAccelerator::SupportedProfiles | 104 media::VideoEncodeAccelerator::SupportedProfiles |
107 AndroidVideoEncodeAccelerator::GetSupportedProfiles() { | 105 AndroidVideoEncodeAccelerator::GetSupportedProfiles() { |
108 SupportedProfiles profiles; | 106 SupportedProfiles profiles; |
109 | 107 |
(...skipping 47 matching lines...) | |
157 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); | 155 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); |
158 | 156 |
159 if (!(media::MediaCodecUtil::SupportsSetParameters() && | 157 if (!(media::MediaCodecUtil::SupportsSetParameters() && |
160 format == media::PIXEL_FORMAT_I420)) { | 158 format == media::PIXEL_FORMAT_I420)) { |
161 DLOG(ERROR) << "Unexpected combo: " << format << ", " << output_profile; | 159 DLOG(ERROR) << "Unexpected combo: " << format << ", " << output_profile; |
162 return false; | 160 return false; |
163 } | 161 } |
164 | 162 |
165 std::string mime_type; | 163 std::string mime_type; |
166 media::VideoCodec codec; | 164 media::VideoCodec codec; |
165 // The client should be prepared to feed at least this many frames into the | |
166 // encoder before being returned any output frames, since the encoder may | |
167 // need to hold onto some subset of inputs as reference pictures. | |
168 unsigned int frame_input_count; | |
DaleCurtis
2016/02/10 17:52:48
We don't use unsigned in Chromium code; this should be uint32_t.
magjed_chromium
2016/02/10 20:25:12
Done, changed to uint32_t. The reason I used 'unsigned int' ...
| |
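The new frame_input_count logic discussed above is small enough to restate on its own. A hedged sketch follows (the helper is hypothetical, not part of the change, and assumes only the media::VideoCodec enum already used in this file); it mirrors the VP8-vs-H.264 choice made a few lines below:

```cpp
#include <cstdint>

// Hypothetical helper restating the per-codec counts chosen in this change:
// a VP8 encoder can emit output after a single input frame, while the H.264
// encoder may hold on to up to ~30 inputs as reference pictures before
// returning anything, so the client must be prepared to feed that many.
uint32_t FrameInputCountForCodec(media::VideoCodec codec) {
  return (codec == media::kCodecVP8) ? 1u : 30u;
}
```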
167 if (output_profile == media::VP8PROFILE_ANY) { | 169 if (output_profile == media::VP8PROFILE_ANY) { |
168 codec = media::kCodecVP8; | 170 codec = media::kCodecVP8; |
169 mime_type = "video/x-vnd.on2.vp8"; | 171 mime_type = "video/x-vnd.on2.vp8"; |
172 frame_input_count = 1; | |
170 } else if (output_profile == media::H264PROFILE_BASELINE || | 173 } else if (output_profile == media::H264PROFILE_BASELINE || |
171 output_profile == media::H264PROFILE_MAIN) { | 174 output_profile == media::H264PROFILE_MAIN) { |
172 codec = media::kCodecH264; | 175 codec = media::kCodecH264; |
173 mime_type = "video/avc"; | 176 mime_type = "video/avc"; |
177 frame_input_count = 30; | |
174 } else { | 178 } else { |
175 return false; | 179 return false; |
176 } | 180 } |
177 | 181 |
182 frame_size_ = input_visible_size; | |
178 last_set_bitrate_ = initial_bitrate; | 183 last_set_bitrate_ = initial_bitrate; |
179 | 184 |
180 // Only consider using MediaCodec if it's likely backed by hardware. | 185 // Only consider using MediaCodec if it's likely backed by hardware. |
181 if (media::VideoCodecBridge::IsKnownUnaccelerated( | 186 if (media::VideoCodecBridge::IsKnownUnaccelerated( |
182 codec, media::MEDIA_CODEC_ENCODER)) { | 187 codec, media::MEDIA_CODEC_ENCODER)) { |
183 DLOG(ERROR) << "No HW support"; | 188 DLOG(ERROR) << "No HW support"; |
184 return false; | 189 return false; |
185 } | 190 } |
186 | 191 |
187 PixelFormat pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR; | 192 PixelFormat pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR; |
188 if (!GetSupportedColorFormatForMime(mime_type, &pixel_format)) { | 193 if (!GetSupportedColorFormatForMime(mime_type, &pixel_format)) { |
189 DLOG(ERROR) << "No color format support."; | 194 DLOG(ERROR) << "No color format support."; |
190 return false; | 195 return false; |
191 } | 196 } |
192 media_codec_.reset(media::VideoCodecBridge::CreateEncoder(codec, | 197 media_codec_.reset(media::VideoCodecBridge::CreateEncoder(codec, |
193 input_visible_size, | 198 input_visible_size, |
194 initial_bitrate, | 199 initial_bitrate, |
195 INITIAL_FRAMERATE, | 200 INITIAL_FRAMERATE, |
196 IFRAME_INTERVAL, | 201 IFRAME_INTERVAL, |
197 pixel_format)); | 202 pixel_format)); |
198 | 203 |
199 if (!media_codec_) { | 204 if (!media_codec_) { |
200 DLOG(ERROR) << "Failed to create/start the codec: " | 205 DLOG(ERROR) << "Failed to create/start the codec: " |
201 << input_visible_size.ToString(); | 206 << input_visible_size.ToString(); |
202 return false; | 207 return false; |
203 } | 208 } |
204 | 209 |
205 num_output_buffers_ = media_codec_->GetOutputBuffersCount(); | 210 // Conservative upper bound for output buffer size: decoded size + 2KB. |
206 output_buffers_capacity_ = media_codec_->GetOutputBuffersCapacity(); | 211 const size_t output_buffer_capacity = |
212 VideoFrame::AllocationSize(format, input_visible_size) + 2048; | |
207 base::MessageLoop::current()->PostTask( | 213 base::MessageLoop::current()->PostTask( |
208 FROM_HERE, | 214 FROM_HERE, |
209 base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers, | 215 base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers, |
210 client_ptr_factory_->GetWeakPtr(), | 216 client_ptr_factory_->GetWeakPtr(), |
211 num_output_buffers_, | 217 frame_input_count, |
212 input_visible_size, | 218 input_visible_size, |
213 output_buffers_capacity_)); | 219 output_buffer_capacity)); |
214 return true; | 220 return true; |
215 } | 221 } |
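As a rough back-of-the-envelope check on the "decoded size + 2KB" bound used above, assuming VideoFrame::AllocationSize returns the usual 1.5 bytes per pixel for planar 4:2:0 (the figure below is purely illustrative):

```cpp
#include <cstddef>

// For a 640x480 I420 input: Y (640*480) + U (320*240) + V (320*240)
// = 460800 bytes, so each bitstream buffer requested from the client
// would be roughly 460800 + 2048 = 462848 bytes.
const size_t kIllustrativeOutputBufferCapacity = 640 * 480 * 3 / 2 + 2048;
```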
216 | 222 |
217 void AndroidVideoEncodeAccelerator::MaybeStartIOTimer() { | 223 void AndroidVideoEncodeAccelerator::MaybeStartIOTimer() { |
218 if (!io_timer_.IsRunning() && | 224 if (!io_timer_.IsRunning() && |
219 (num_buffers_at_codec_ > 0 || !pending_frames_.empty())) { | 225 (num_buffers_at_codec_ > 0 || !pending_frames_.empty())) { |
220 io_timer_.Start(FROM_HERE, | 226 io_timer_.Start(FROM_HERE, |
221 EncodePollDelay(), | 227 EncodePollDelay(), |
222 this, | 228 this, |
223 &AndroidVideoEncodeAccelerator::DoIOTask); | 229 &AndroidVideoEncodeAccelerator::DoIOTask); |
224 } | 230 } |
225 } | 231 } |
226 | 232 |
227 void AndroidVideoEncodeAccelerator::MaybeStopIOTimer() { | 233 void AndroidVideoEncodeAccelerator::MaybeStopIOTimer() { |
228 if (io_timer_.IsRunning() && | 234 if (io_timer_.IsRunning() && |
229 (num_buffers_at_codec_ == 0 && pending_frames_.empty())) { | 235 (num_buffers_at_codec_ == 0 && pending_frames_.empty())) { |
230 io_timer_.Stop(); | 236 io_timer_.Stop(); |
231 } | 237 } |
232 } | 238 } |
233 | 239 |
234 void AndroidVideoEncodeAccelerator::Encode( | 240 void AndroidVideoEncodeAccelerator::Encode( |
235 const scoped_refptr<VideoFrame>& frame, | 241 const scoped_refptr<VideoFrame>& frame, |
236 bool force_keyframe) { | 242 bool force_keyframe) { |
237 DVLOG(3) << __PRETTY_FUNCTION__ << ": " << force_keyframe; | 243 DVLOG(3) << __PRETTY_FUNCTION__ << ": " << force_keyframe; |
238 DCHECK(thread_checker_.CalledOnValidThread()); | 244 DCHECK(thread_checker_.CalledOnValidThread()); |
239 RETURN_ON_FAILURE(frame->format() == media::PIXEL_FORMAT_I420, | 245 RETURN_ON_FAILURE(frame->format() == media::PIXEL_FORMAT_I420, |
240 "Unexpected format", kInvalidArgumentError); | 246 "Unexpected format", kInvalidArgumentError); |
241 | 247 RETURN_ON_FAILURE(frame->visible_rect().size() == frame_size_, |
248 "Unexpected resolution", kInvalidArgumentError); | |
242 // MediaCodec doesn't have a way to specify stride for non-Packed formats, so | 249 // MediaCodec doesn't have a way to specify stride for non-Packed formats, so |
243 // we insist on being called with packed frames and no cropping :( | 250 // we insist on being called with packed frames and no cropping :( |
244 RETURN_ON_FAILURE(frame->row_bytes(VideoFrame::kYPlane) == | 251 RETURN_ON_FAILURE(frame->row_bytes(VideoFrame::kYPlane) == |
245 frame->stride(VideoFrame::kYPlane) && | 252 frame->stride(VideoFrame::kYPlane) && |
246 frame->row_bytes(VideoFrame::kUPlane) == | 253 frame->row_bytes(VideoFrame::kUPlane) == |
247 frame->stride(VideoFrame::kUPlane) && | 254 frame->stride(VideoFrame::kUPlane) && |
248 frame->row_bytes(VideoFrame::kVPlane) == | 255 frame->row_bytes(VideoFrame::kVPlane) == |
249 frame->stride(VideoFrame::kVPlane) && | 256 frame->stride(VideoFrame::kVPlane) && |
250 frame->coded_size() == frame->visible_rect().size(), | 257 frame->coded_size() == frame->visible_rect().size(), |
251 "Non-packed frame, or visible_rect != coded_size", | 258 "Non-packed frame, or visible_rect != coded_size", |
252 kInvalidArgumentError); | 259 kInvalidArgumentError); |
253 | 260 |
254 pending_frames_.push( | 261 pending_frames_.push( |
255 base::MakeTuple(frame, force_keyframe, base::Time::Now())); | 262 base::MakeTuple(frame, force_keyframe, base::Time::Now())); |
256 DoIOTask(); | 263 DoIOTask(); |
257 } | 264 } |
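To spell out what the packed-frame check in Encode() guards against: MediaCodec takes flat input buffers, so any per-row padding or cropping would otherwise get copied into the codec's view of the frame. A standalone restatement of the same predicate (the helper name is made up; the accessors mirror the media::VideoFrame calls used in the diff):

```cpp
bool IsPackedWithNoCrop(const scoped_refptr<media::VideoFrame>& frame) {
  // "Packed" means no padding between rows in any plane (row_bytes equals
  // stride), and the coded size matches the visible size (no cropping).
  return frame->row_bytes(media::VideoFrame::kYPlane) ==
             frame->stride(media::VideoFrame::kYPlane) &&
         frame->row_bytes(media::VideoFrame::kUPlane) ==
             frame->stride(media::VideoFrame::kUPlane) &&
         frame->row_bytes(media::VideoFrame::kVPlane) ==
             frame->stride(media::VideoFrame::kVPlane) &&
         frame->coded_size() == frame->visible_rect().size();
}
```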
258 | 265 |
259 void AndroidVideoEncodeAccelerator::UseOutputBitstreamBuffer( | 266 void AndroidVideoEncodeAccelerator::UseOutputBitstreamBuffer( |
260 const media::BitstreamBuffer& buffer) { | 267 const media::BitstreamBuffer& buffer) { |
261 DVLOG(3) << __PRETTY_FUNCTION__ << ": bitstream_buffer_id=" << buffer.id(); | 268 DVLOG(3) << __PRETTY_FUNCTION__ << ": bitstream_buffer_id=" << buffer.id(); |
262 DCHECK(thread_checker_.CalledOnValidThread()); | 269 DCHECK(thread_checker_.CalledOnValidThread()); |
263 RETURN_ON_FAILURE(buffer.size() >= media_codec_->GetOutputBuffersCapacity(), | |
264 "Output buffers too small!", | |
265 kInvalidArgumentError); | |
266 available_bitstream_buffers_.push_back(buffer); | 270 available_bitstream_buffers_.push_back(buffer); |
267 DoIOTask(); | 271 DoIOTask(); |
268 } | 272 } |
269 | 273 |
270 void AndroidVideoEncodeAccelerator::RequestEncodingParametersChange( | 274 void AndroidVideoEncodeAccelerator::RequestEncodingParametersChange( |
271 uint32_t bitrate, | 275 uint32_t bitrate, |
272 uint32_t framerate) { | 276 uint32_t framerate) { |
273 DVLOG(3) << __PRETTY_FUNCTION__ << ": bitrate: " << bitrate | 277 DVLOG(3) << __PRETTY_FUNCTION__ << ": bitrate: " << bitrate |
274 << ", framerate: " << framerate; | 278 << ", framerate: " << framerate; |
275 DCHECK(thread_checker_.CalledOnValidThread()); | 279 DCHECK(thread_checker_.CalledOnValidThread()); |
(...skipping 90 matching lines...) | |
366 input_buf_index, NULL, queued_size, fake_input_timestamp_); | 370 input_buf_index, NULL, queued_size, fake_input_timestamp_); |
367 UMA_HISTOGRAM_TIMES("Media.AVEA.InputQueueTime", | 371 UMA_HISTOGRAM_TIMES("Media.AVEA.InputQueueTime", |
368 base::Time::Now() - base::get<2>(input)); | 372 base::Time::Now() - base::get<2>(input)); |
369 RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK, | 373 RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK, |
370 "Failed to QueueInputBuffer: " << status, | 374 "Failed to QueueInputBuffer: " << status, |
371 kPlatformFailureError); | 375 kPlatformFailureError); |
372 ++num_buffers_at_codec_; | 376 ++num_buffers_at_codec_; |
373 pending_frames_.pop(); | 377 pending_frames_.pop(); |
374 } | 378 } |
375 | 379 |
376 bool AndroidVideoEncodeAccelerator::DoOutputBuffersSuffice() { | |
377 // If this returns false ever, then the VEA::Client interface will need to | |
378 // grow a DismissBitstreamBuffer() call, and VEA::Client impls will have to be | |
379 // prepared to field multiple requests to RequireBitstreamBuffers(). | |
380 int count = media_codec_->GetOutputBuffersCount(); | |
381 size_t capacity = media_codec_->GetOutputBuffersCapacity(); | |
382 bool ret = count <= num_output_buffers_ && | |
383 capacity <= output_buffers_capacity_; | |
384 LOG_IF(ERROR, !ret) << "Need more/bigger buffers; before: " | |
385 << num_output_buffers_ << "x" << output_buffers_capacity_ | |
386 << ", now: " << count << "x" << capacity; | |
387 UMA_HISTOGRAM_BOOLEAN("Media.AVEA.OutputBuffersSuffice", ret); | |
388 return ret; | |
389 } | |
390 | |
391 void AndroidVideoEncodeAccelerator::DequeueOutput() { | 380 void AndroidVideoEncodeAccelerator::DequeueOutput() { |
392 if (!client_ptr_factory_->GetWeakPtr() || | 381 if (!client_ptr_factory_->GetWeakPtr() || |
393 available_bitstream_buffers_.empty() || num_buffers_at_codec_ == 0) { | 382 available_bitstream_buffers_.empty() || num_buffers_at_codec_ == 0) { |
394 return; | 383 return; |
395 } | 384 } |
396 | 385 |
397 int32_t buf_index = 0; | 386 int32_t buf_index = 0; |
398 size_t offset = 0; | 387 size_t offset = 0; |
399 size_t size = 0; | 388 size_t size = 0; |
400 bool key_frame = false; | 389 bool key_frame = false; |
401 do { | 390 do { |
402 media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer( | 391 media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer( |
403 NoWaitTimeOut(), &buf_index, &offset, &size, NULL, NULL, &key_frame); | 392 NoWaitTimeOut(), &buf_index, &offset, &size, NULL, NULL, &key_frame); |
404 switch (status) { | 393 switch (status) { |
405 case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER: | 394 case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER: |
406 return; | 395 return; |
407 | 396 |
408 case media::MEDIA_CODEC_ERROR: | 397 case media::MEDIA_CODEC_ERROR: |
409 RETURN_ON_FAILURE(false, "Codec error", kPlatformFailureError); | 398 RETURN_ON_FAILURE(false, "Codec error", kPlatformFailureError); |
410 // Unreachable because of previous statement, but included for clarity. | 399 // Unreachable because of previous statement, but included for clarity. |
411 return; | 400 return; |
412 | 401 |
413 case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: // Fall-through. | 402 case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: |
403 int width; | |
404 int height; | |
405 media_codec_->GetOutputFormat(&width, &height); | |
liberato (no reviews please)
2016/02/10 18:40:09
I don't understand the goal of asking for the output size here.
magjed_chromium
2016/02/10 20:25:12
It's just a sanity check to make sure the resolution hasn't changed.
| |
406 RETURN_ON_FAILURE( | |
407 width == frame_size_.width() && height == frame_size_.height(), | |
408 "Unexpected resolution change. input: " | |
409 << frame_size_.width() << "x" << frame_size_.height() | |
410 << ", output: " << width << "x" << height, | |
411 kPlatformFailureError); | |
412 break; | |
413 | |
414 case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED: | 414 case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED: |
415 RETURN_ON_FAILURE(DoOutputBuffersSuffice(), | |
416 "Bitstream now requires more/larger buffers", | |
417 kPlatformFailureError); | |
418 break; | 415 break; |
419 | 416 |
420 case media::MEDIA_CODEC_OK: | 417 case media::MEDIA_CODEC_OK: |
421 DCHECK_GE(buf_index, 0); | 418 DCHECK_GE(buf_index, 0); |
422 break; | 419 break; |
423 | 420 |
424 default: | 421 default: |
425 NOTREACHED(); | 422 NOTREACHED(); |
426 break; | 423 break; |
427 } | 424 } |
(...skipping 19 matching lines...) | |
447 base::MessageLoop::current()->PostTask( | 444 base::MessageLoop::current()->PostTask( |
448 FROM_HERE, | 445 FROM_HERE, |
449 base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady, | 446 base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady, |
450 client_ptr_factory_->GetWeakPtr(), | 447 client_ptr_factory_->GetWeakPtr(), |
451 bitstream_buffer.id(), | 448 bitstream_buffer.id(), |
452 size, | 449 size, |
453 key_frame)); | 450 key_frame)); |
454 } | 451 } |
455 | 452 |
456 } // namespace content | 453 } // namespace content |
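Relating to the review exchange on MEDIA_CODEC_OUTPUT_FORMAT_CHANGED above: the new handling only verifies that the size the encoder reports still matches the configured input size. A hedged standalone restatement (the function is hypothetical; GetOutputFormat and the frame-size accessors are the names used in the diff):

```cpp
bool OutputResolutionMatchesInput(media::VideoCodecBridge* media_codec,
                                  const gfx::Size& frame_size) {
  int width = 0;
  int height = 0;
  media_codec->GetOutputFormat(&width, &height);
  return width == frame_size.width() && height == frame_size.height();
}
```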