// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/gpu/media/android_video_encode_accelerator.h"

#include <stdint.h>

#include <memory>
#include <set>

#include "base/bind.h"
#include "base/logging.h"
#include "base/message_loop/message_loop.h"
#include "base/metrics/histogram.h"
#include "content/common/gpu/media/shared_memory_region.h"
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#include "gpu/ipc/service/gpu_channel.h"
#include "media/base/android/media_codec_util.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/limits.h"
#include "media/video/picture.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "ui/gl/android/scoped_java_surface.h"
#include "ui/gl/gl_bindings.h"

using media::VideoCodecBridge;
using media::VideoFrame;

namespace content {

// Limit the default max video codec size for Android to avoid HW codec
// initialization failures for resolutions higher than 720p.
// Default values are from Libjingle "jsepsessiondescription.cc".
const int kMaxEncodeFrameWidth = 1280;
const int kMaxEncodeFrameHeight = 720;
const int kMaxFramerateNumerator = 30;
const int kMaxFramerateDenominator = 1;

enum PixelFormat {
  // Subset of MediaCodecInfo.CodecCapabilities.
  COLOR_FORMAT_YUV420_PLANAR = 19,
  COLOR_FORMAT_YUV420_SEMIPLANAR = 21,
};
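
// INITIAL_FRAMERATE and IFRAME_INTERVAL are used by CreateEncoder() in
// Initialize() below, but their definitions were missing from this excerpt;
// the values here are a reconstruction using reasonable defaults. The initial
// framerate is an arbitrary choice, and the I-frame interval is effectively
// "never", since key frames are instead requested explicitly via
// RequestKeyFrameSoon().
enum {
  INITIAL_FRAMERATE = 30,
  IFRAME_INTERVAL = INT32_MAX,
};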

// Helper macro for dealing with failure. If |result| evaluates to false,
// emit |log| to DLOG(ERROR), register |error| with the client, and return.
#define RETURN_ON_FAILURE(result, log, error)                  \
  do {                                                         \
    if (!(result)) {                                           \
      DLOG(ERROR) << log;                                      \
      if (client_ptr_factory_->GetWeakPtr()) {                 \
        client_ptr_factory_->GetWeakPtr()->NotifyError(error); \
        client_ptr_factory_.reset();                           \
      }                                                        \
      return;                                                  \
    }                                                          \
  } while (0)

// Because MediaCodec is thread-hostile (must be poked on a single thread) and
// has no callback mechanism (b/11990118), we must drive it by polling for
// complete frames (and available input buffers, when the codec is fully
// saturated). This function defines the polling delay. The value used is an
// arbitrary choice that trades off CPU utilization (spinning) against latency.
// Mirrors android_video_decode_accelerator.cc::DecodePollDelay().
static inline const base::TimeDelta EncodePollDelay() {
  // An alternative to this polling scheme could be to dedicate a new thread
  // (instead of using the ChildThread) to run the MediaCodec, and make that
  // thread use the timeout-based flavor of MediaCodec's dequeue methods when
  // it believes the codec should complete "soon" (e.g. waiting for an input
  // buffer, or waiting for a picture when it knows enough complete input
  // pictures have been fed to saturate any internal buffering). This is
  // speculative and it's unclear that this would be a win (nor that there's a
  // reasonably device-agnostic way to fill in the "believes" above).
  return base::TimeDelta::FromMilliseconds(10);
}

static inline const base::TimeDelta NoWaitTimeOut() {
  return base::TimeDelta::FromMicroseconds(0);
}

static bool GetSupportedColorFormatForMime(const std::string& mime,
                                           PixelFormat* pixel_format) {
  if (mime.empty())
    return false;

  std::set<int> formats = media::MediaCodecUtil::GetEncoderColorFormats(mime);
  if (formats.count(COLOR_FORMAT_YUV420_SEMIPLANAR) > 0)
    *pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR;
  else if (formats.count(COLOR_FORMAT_YUV420_PLANAR) > 0)
    *pixel_format = COLOR_FORMAT_YUV420_PLANAR;
  else
    return false;

  return true;
}

AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator()
    : num_buffers_at_codec_(0), last_set_bitrate_(0) {}

AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

media::VideoEncodeAccelerator::SupportedProfiles
AndroidVideoEncodeAccelerator::GetSupportedProfiles() {
  SupportedProfiles profiles;

  const struct {
    const media::VideoCodec codec;
    const media::VideoCodecProfile profile;
  } kSupportedCodecs[] = {
      {media::kCodecVP8, media::VP8PROFILE_ANY},
      {media::kCodecH264, media::H264PROFILE_BASELINE},
      {media::kCodecH264, media::H264PROFILE_MAIN}};

  for (const auto& supported_codec : kSupportedCodecs) {
    if (supported_codec.codec == media::kCodecVP8 &&
        !media::MediaCodecUtil::IsVp8EncoderAvailable()) {
      continue;
    }

    if (VideoCodecBridge::IsKnownUnaccelerated(supported_codec.codec,
                                               media::MEDIA_CODEC_ENCODER)) {
      continue;
    }

    SupportedProfile profile;
    profile.profile = supported_codec.profile;
    // It would be nice if MediaCodec exposed the maximum capabilities of the
    // encoder. Hard-code some reasonable defaults as a workaround.
    profile.max_resolution.SetSize(kMaxEncodeFrameWidth,
                                   kMaxEncodeFrameHeight);
    profile.max_framerate_numerator = kMaxFramerateNumerator;
    profile.max_framerate_denominator = kMaxFramerateDenominator;
    profiles.push_back(profile);
  }
  return profiles;
}

bool AndroidVideoEncodeAccelerator::Initialize(
    media::VideoPixelFormat format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32_t initial_bitrate,
    Client* client) {
  DVLOG(3) << __PRETTY_FUNCTION__ << " format: " << format
           << ", input_visible_size: " << input_visible_size.ToString()
           << ", output_profile: " << output_profile
           << ", initial_bitrate: " << initial_bitrate;
  DCHECK(!media_codec_);
  DCHECK(thread_checker_.CalledOnValidThread());

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));

  if (!(media::MediaCodecUtil::SupportsSetParameters() &&
        format == media::PIXEL_FORMAT_I420)) {
    DLOG(ERROR) << "Unexpected combo: " << format << ", " << output_profile;
    return false;
  }

  std::string mime_type;
  media::VideoCodec codec;
  // The client should be prepared to feed at least this many frames into the
  // encoder before being returned any output frames, since the encoder may
  // need to hold onto some subset of inputs as reference pictures.
  uint32_t frame_input_count;
  if (output_profile == media::VP8PROFILE_ANY) {
    codec = media::kCodecVP8;
    mime_type = "video/x-vnd.on2.vp8";
    frame_input_count = 1;
  } else if (output_profile == media::H264PROFILE_BASELINE ||
             output_profile == media::H264PROFILE_MAIN) {
    codec = media::kCodecH264;
    mime_type = "video/avc";
    frame_input_count = 30;
  } else {
    return false;
  }

  frame_size_ = input_visible_size;
  last_set_bitrate_ = initial_bitrate;

  // Only consider using MediaCodec if it's likely backed by hardware.
  if (media::VideoCodecBridge::IsKnownUnaccelerated(
          codec, media::MEDIA_CODEC_ENCODER)) {
    DLOG(ERROR) << "No HW support";
    return false;
  }

  PixelFormat pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR;
  if (!GetSupportedColorFormatForMime(mime_type, &pixel_format)) {
    DLOG(ERROR) << "No color format support.";
    return false;
  }
  media_codec_.reset(media::VideoCodecBridge::CreateEncoder(codec,
                                                            input_visible_size,
                                                            initial_bitrate,
                                                            INITIAL_FRAMERATE,
                                                            IFRAME_INTERVAL,
                                                            pixel_format));

  if (!media_codec_) {
    DLOG(ERROR) << "Failed to create/start the codec: "
                << input_visible_size.ToString();
    return false;
  }

  // Conservative upper bound for output buffer size: decoded size + 2KB.
  const size_t output_buffer_capacity =
      VideoFrame::AllocationSize(format, input_visible_size) + 2048;
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers,
                 client_ptr_factory_->GetWeakPtr(),
                 frame_input_count,
                 input_visible_size,
                 output_buffer_capacity));
  return true;
}

void AndroidVideoEncodeAccelerator::MaybeStartIOTimer() {
  if (!io_timer_.IsRunning() &&
      (num_buffers_at_codec_ > 0 || !pending_frames_.empty())) {
    io_timer_.Start(FROM_HERE,
                    EncodePollDelay(),
                    this,
                    &AndroidVideoEncodeAccelerator::DoIOTask);
  }
}

void AndroidVideoEncodeAccelerator::MaybeStopIOTimer() {
  if (io_timer_.IsRunning() &&
      (num_buffers_at_codec_ == 0 && pending_frames_.empty())) {
    io_timer_.Stop();
  }
}

void AndroidVideoEncodeAccelerator::Encode(
    const scoped_refptr<VideoFrame>& frame,
    bool force_keyframe) {
  DVLOG(3) << __PRETTY_FUNCTION__ << ": " << force_keyframe;
  DCHECK(thread_checker_.CalledOnValidThread());
  RETURN_ON_FAILURE(frame->format() == media::PIXEL_FORMAT_I420,
                    "Unexpected format", kInvalidArgumentError);
  RETURN_ON_FAILURE(frame->visible_rect().size() == frame_size_,
                    "Unexpected resolution", kInvalidArgumentError);
  // MediaCodec doesn't have a way to specify stride for non-packed formats,
  // so we insist on being called with packed frames and no cropping :(
  RETURN_ON_FAILURE(frame->row_bytes(VideoFrame::kYPlane) ==
                            frame->stride(VideoFrame::kYPlane) &&
                        frame->row_bytes(VideoFrame::kUPlane) ==
                            frame->stride(VideoFrame::kUPlane) &&
                        frame->row_bytes(VideoFrame::kVPlane) ==
                            frame->stride(VideoFrame::kVPlane) &&
                        frame->coded_size() == frame->visible_rect().size(),
                    "Non-packed frame, or visible_rect != coded_size",
                    kInvalidArgumentError);

  pending_frames_.push(
      base::MakeTuple(frame, force_keyframe, base::Time::Now()));
  DoIOTask();
}

void AndroidVideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOG(3) << __PRETTY_FUNCTION__ << ": bitstream_buffer_id=" << buffer.id();
  DCHECK(thread_checker_.CalledOnValidThread());
  available_bitstream_buffers_.push_back(buffer);
  DoIOTask();
}

void AndroidVideoEncodeAccelerator::RequestEncodingParametersChange(
    uint32_t bitrate,
    uint32_t framerate) {
  DVLOG(3) << __PRETTY_FUNCTION__ << ": bitrate: " << bitrate
           << ", framerate: " << framerate;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (bitrate != last_set_bitrate_) {
    last_set_bitrate_ = bitrate;
    media_codec_->SetVideoBitrate(bitrate);
  }
  // Note: Android's MediaCodec doesn't allow mid-stream adjustments to
  // framerate, so we ignore that here. This is OK because Android only uses
  // the framerate value from MediaFormat during configure() as a proxy for
  // bitrate, and we set the bitrate explicitly.
}

void AndroidVideoEncodeAccelerator::Destroy() {
  DVLOG(3) << __PRETTY_FUNCTION__;
  DCHECK(thread_checker_.CalledOnValidThread());
  client_ptr_factory_.reset();
  if (media_codec_) {
    if (io_timer_.IsRunning())
      io_timer_.Stop();
    media_codec_->Stop();
  }
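  // Per the VideoEncodeAccelerator contract, Destroy() is responsible for
  // freeing the encoder, so self-deletion here is the expected teardown path.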
  delete this;
}

void AndroidVideoEncodeAccelerator::DoIOTask() {
  QueueInput();
  DequeueOutput();
  MaybeStartIOTimer();
  MaybeStopIOTimer();
}

void AndroidVideoEncodeAccelerator::QueueInput() {
  if (!client_ptr_factory_->GetWeakPtr() || pending_frames_.empty())
    return;

  int input_buf_index = 0;
  media::MediaCodecStatus status =
      media_codec_->DequeueInputBuffer(NoWaitTimeOut(), &input_buf_index);
  if (status != media::MEDIA_CODEC_OK) {
    DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
           status == media::MEDIA_CODEC_ERROR);
    RETURN_ON_FAILURE(status != media::MEDIA_CODEC_ERROR,
                      "MediaCodec error",
                      kPlatformFailureError);
    return;
  }

  const PendingFrames::value_type& input = pending_frames_.front();
  bool is_key_frame = base::get<1>(input);
  if (is_key_frame) {
    // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
    // indicate this in the QueueInputBuffer() call below and guarantee _this_
    // frame be encoded as a key frame, but sadly that flag is ignored.
    // Instead, we request a key frame "soon".
    media_codec_->RequestKeyFrameSoon();
  }
  scoped_refptr<VideoFrame> frame = base::get<0>(input);

  uint8_t* buffer = NULL;
  size_t capacity = 0;
  status = media_codec_->GetInputBuffer(input_buf_index, &buffer, &capacity);
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK, "GetInputBuffer failed.",
                    kPlatformFailureError);

  size_t queued_size = VideoFrame::AllocationSize(media::PIXEL_FORMAT_I420,
                                                  frame->coded_size());
  RETURN_ON_FAILURE(capacity >= queued_size,
                    "Failed to get input buffer: " << input_buf_index,
                    kPlatformFailureError);

  uint8_t* dst_y = buffer;
  int dst_stride_y = frame->stride(VideoFrame::kYPlane);
  uint8_t* dst_uv =
      buffer +
      frame->stride(VideoFrame::kYPlane) * frame->rows(VideoFrame::kYPlane);
  int dst_stride_uv = frame->stride(VideoFrame::kUPlane) * 2;
  // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR. See comment at other
  // mention of that constant.
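  // Note: the conversion below always produces NV12 (semiplanar), even though
  // GetSupportedColorFormatForMime() may have selected
  // COLOR_FORMAT_YUV420_PLANAR during Initialize(); the code as written
  // assumes the semiplanar format is the one actually in use.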
  bool converted = !libyuv::I420ToNV12(frame->data(VideoFrame::kYPlane),
                                       frame->stride(VideoFrame::kYPlane),
                                       frame->data(VideoFrame::kUPlane),
                                       frame->stride(VideoFrame::kUPlane),
                                       frame->data(VideoFrame::kVPlane),
                                       frame->stride(VideoFrame::kVPlane),
                                       dst_y,
                                       dst_stride_y,
                                       dst_uv,
                                       dst_stride_uv,
                                       frame->coded_size().width(),
                                       frame->coded_size().height());
  RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError);

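  // The encoder does not use real frame timestamps; a counter bumped by one
  // microsecond per queued frame satisfies MediaCodec's requirement that
  // presentation timestamps be monotonically increasing.
  // (|fake_input_timestamp_| is assumed to be declared as a zero-initialized
  // base::TimeDelta in the header, which is not shown in this excerpt.)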
  fake_input_timestamp_ += base::TimeDelta::FromMicroseconds(1);
  status = media_codec_->QueueInputBuffer(
      input_buf_index, NULL, queued_size, fake_input_timestamp_);
  UMA_HISTOGRAM_TIMES("Media.AVEA.InputQueueTime",
                      base::Time::Now() - base::get<2>(input));
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                    "Failed to QueueInputBuffer: " << status,
                    kPlatformFailureError);
  ++num_buffers_at_codec_;
  pending_frames_.pop();
}

void AndroidVideoEncodeAccelerator::DequeueOutput() {
  if (!client_ptr_factory_->GetWeakPtr() ||
      available_bitstream_buffers_.empty() || num_buffers_at_codec_ == 0) {
    return;
  }

  int32_t buf_index = 0;
  size_t offset = 0;
  size_t size = 0;
  bool key_frame = false;
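  // Loop until a real output buffer index is returned; for metadata-only
  // results (e.g. MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED) the dequeued index is
  // expected to stay negative, so we dequeue again.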
  do {
    media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
        NoWaitTimeOut(), &buf_index, &offset, &size, NULL, NULL, &key_frame);
    switch (status) {
      case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
        return;

      case media::MEDIA_CODEC_ERROR:
        RETURN_ON_FAILURE(false, "Codec error", kPlatformFailureError);
        // Unreachable because of previous statement, but included for clarity.
        return;

      case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED:
        RETURN_ON_FAILURE(false, "Unexpected output format change",
                          kPlatformFailureError);
        break;

      case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
        break;

      case media::MEDIA_CODEC_OK:
        DCHECK_GE(buf_index, 0);
        break;

      default:
        NOTREACHED();
        break;
    }
  } while (buf_index < 0);

  media::BitstreamBuffer bitstream_buffer = available_bitstream_buffers_.back();
  available_bitstream_buffers_.pop_back();
  std::unique_ptr<SharedMemoryRegion> shm(
      new SharedMemoryRegion(bitstream_buffer, false));
  RETURN_ON_FAILURE(shm->Map(), "Failed to map SHM", kPlatformFailureError);
  RETURN_ON_FAILURE(size <= shm->size(),
                    "Encoded buffer too large: " << size << ">" << shm->size(),
                    kPlatformFailureError);

  media::MediaCodecStatus status = media_codec_->CopyFromOutputBuffer(
      buf_index, offset, shm->memory(), size);
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                    "CopyFromOutputBuffer failed", kPlatformFailureError);
  media_codec_->ReleaseOutputBuffer(buf_index, false);
  --num_buffers_at_codec_;

  UMA_HISTOGRAM_COUNTS_10000("Media.AVEA.EncodedBufferSizeKB", size / 1024);
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady,
                 client_ptr_factory_->GetWeakPtr(),
                 bitstream_buffer.id(),
                 size,
                 key_frame));
}

}  // namespace content