// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/gpu/media/android_video_encode_accelerator.h"

#include <set>

#include "base/bind.h"
#include "base/logging.h"
#include "base/message_loop/message_loop.h"
#include "base/metrics/histogram.h"
#include "content/common/gpu/media/shared_memory_region.h"
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#include "gpu/ipc/service/gpu_channel.h"
#include "media/base/android/media_codec_util.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/limits.h"
#include "media/video/picture.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "ui/gl/android/scoped_java_surface.h"
#include "ui/gl/gl_bindings.h"

using media::VideoCodecBridge;
using media::VideoFrame;

namespace content {

// Limit the default maximum video codec size for Android to avoid HW codec
// initialization failures for resolutions higher than 720p. Default values
// are from Libjingle's "jsepsessiondescription.cc".
const int kMaxEncodeFrameWidth = 1280;
const int kMaxEncodeFrameHeight = 720;
const int kMaxFramerateNumerator = 30;
const int kMaxFramerateDenominator = 1;

enum PixelFormat {
  // Subset of MediaCodecInfo.CodecCapabilities.
  COLOR_FORMAT_YUV420_PLANAR = 19,
  COLOR_FORMAT_YUV420_SEMIPLANAR = 21,
};
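
// Configure()-time constants used by CreateEncoder() below. NOTE: the exact
// values here are assumptions. The framerate passed at configure() serves
// only as a bitrate hint (the real rate arrives later via
// RequestEncodingParametersChange()), and key frames are requested explicitly
// via RequestKeyFrameSoon(), so a very large I-frame interval effectively
// disables unforced key frames.
enum {
  INITIAL_FRAMERATE = 30,
  IFRAME_INTERVAL = 0x7FFFFFFF,  // Effectively never.
};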

// Helper macro for dealing with failure. If |result| evaluates to false,
// emit |log| to DLOG(ERROR), register |error| with the client, and return.
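// For example, a typical call site:
//   RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK, "Dequeue failed",
//                     kPlatformFailureError);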
#define RETURN_ON_FAILURE(result, log, error)                  \
  do {                                                         \
    if (!(result)) {                                           \
      DLOG(ERROR) << log;                                      \
      if (client_ptr_factory_->GetWeakPtr()) {                 \
        client_ptr_factory_->GetWeakPtr()->NotifyError(error); \
        client_ptr_factory_.reset();                           \
      }                                                        \
      return;                                                  \
    }                                                          \
  } while (0)

// Because MediaCodec is thread-hostile (must be poked on a single thread) and
// has no callback mechanism (b/11990118), we must drive it by polling for
// complete frames (and available input buffers, when the codec is fully
// saturated). This function defines the polling delay. The value used is an
// arbitrary choice that trades off CPU utilization (spinning) against latency.
// Mirrors android_video_decode_accelerator.cc::DecodePollDelay().
static inline const base::TimeDelta EncodePollDelay() {
  // An alternative to this polling scheme could be to dedicate a new thread
  // (instead of using the ChildThread) to run the MediaCodec, and make that
  // thread use the timeout-based flavor of MediaCodec's dequeue methods when
  // it believes the codec should complete "soon" (e.g. waiting for an input
  // buffer, or waiting for a picture when it knows enough complete input
  // pictures have been fed to saturate any internal buffering). This is
  // speculative and it's unclear that this would be a win (nor that there's a
  // reasonably device-agnostic way to fill in the "believes" above).
  return base::TimeDelta::FromMilliseconds(10);
}

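// A zero timeout for Dequeue{Input,Output}Buffer(): poll without blocking.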
static inline const base::TimeDelta NoWaitTimeOut() {
  return base::TimeDelta::FromMicroseconds(0);
}

static bool GetSupportedColorFormatForMime(const std::string& mime,
                                           PixelFormat* pixel_format) {
  if (mime.empty())
    return false;

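  // Prefer the semiplanar (NV12) format, since that is what QueueInput()
  // converts incoming I420 frames into.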
  std::set<int> formats = media::MediaCodecUtil::GetEncoderColorFormats(mime);
  if (formats.count(COLOR_FORMAT_YUV420_SEMIPLANAR) > 0)
    *pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR;
  else if (formats.count(COLOR_FORMAT_YUV420_PLANAR) > 0)
    *pixel_format = COLOR_FORMAT_YUV420_PLANAR;
  else
    return false;

  return true;
}

AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator()
    : num_buffers_at_codec_(0), last_set_bitrate_(0) {}

AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

media::VideoEncodeAccelerator::SupportedProfiles
AndroidVideoEncodeAccelerator::GetSupportedProfiles() {
  SupportedProfiles profiles;

  const struct {
    const media::VideoCodec codec;
    const media::VideoCodecProfile profile;
  } kSupportedCodecs[] = {
    { media::kCodecVP8, media::VP8PROFILE_ANY },
    { media::kCodecH264, media::H264PROFILE_BASELINE },
    { media::kCodecH264, media::H264PROFILE_MAIN }
  };

  for (const auto& supported_codec : kSupportedCodecs) {
    if (supported_codec.codec == media::kCodecVP8 &&
        !media::MediaCodecUtil::IsVp8EncoderAvailable()) {
      continue;
    }

    if (VideoCodecBridge::IsKnownUnaccelerated(supported_codec.codec,
                                               media::MEDIA_CODEC_ENCODER)) {
      continue;
    }

    SupportedProfile profile;
    profile.profile = supported_codec.profile;
    // It would be nice if MediaCodec exposed the maximum capabilities of the
    // encoder. Hard-code some reasonable defaults as a workaround.
    profile.max_resolution.SetSize(kMaxEncodeFrameWidth,
                                   kMaxEncodeFrameHeight);
    profile.max_framerate_numerator = kMaxFramerateNumerator;
    profile.max_framerate_denominator = kMaxFramerateDenominator;
    profiles.push_back(profile);
  }
  return profiles;
}

bool AndroidVideoEncodeAccelerator::Initialize(
    media::VideoPixelFormat format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32_t initial_bitrate,
    Client* client) {
  DVLOG(3) << __PRETTY_FUNCTION__ << " format: " << format
           << ", input_visible_size: " << input_visible_size.ToString()
           << ", output_profile: " << output_profile
           << ", initial_bitrate: " << initial_bitrate;
  DCHECK(!media_codec_);
  DCHECK(thread_checker_.CalledOnValidThread());

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));

  if (!(media::MediaCodecUtil::SupportsSetParameters() &&
        format == media::PIXEL_FORMAT_I420)) {
    DLOG(ERROR) << "Unsupported input format (" << format
                << ") or MediaCodec lacks setParameters() support.";
    return false;
  }

  std::string mime_type;
  media::VideoCodec codec;
  // The client should be prepared to feed at least this many frames into the
  // encoder before being returned any output frames, since the encoder may
  // need to hold onto some subset of inputs as reference pictures.
  uint32_t frame_input_count;
  if (output_profile == media::VP8PROFILE_ANY) {
    codec = media::kCodecVP8;
    mime_type = "video/x-vnd.on2.vp8";
    frame_input_count = 1;
  } else if (output_profile == media::H264PROFILE_BASELINE ||
             output_profile == media::H264PROFILE_MAIN) {
    codec = media::kCodecH264;
    mime_type = "video/avc";
    frame_input_count = 30;
  } else {
    return false;
  }

  frame_size_ = input_visible_size;
  last_set_bitrate_ = initial_bitrate;

  // Only consider using MediaCodec if it's likely backed by hardware.
  if (media::VideoCodecBridge::IsKnownUnaccelerated(
          codec, media::MEDIA_CODEC_ENCODER)) {
    DLOG(ERROR) << "No HW support";
    return false;
  }

  PixelFormat pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR;
  if (!GetSupportedColorFormatForMime(mime_type, &pixel_format)) {
    DLOG(ERROR) << "No color format support.";
    return false;
  }
  media_codec_.reset(media::VideoCodecBridge::CreateEncoder(codec,
                                                            input_visible_size,
                                                            initial_bitrate,
                                                            INITIAL_FRAMERATE,
                                                            IFRAME_INTERVAL,
                                                            pixel_format));

  if (!media_codec_) {
    DLOG(ERROR) << "Failed to create/start the codec: "
                << input_visible_size.ToString();
    return false;
  }

  // Conservative upper bound for the output buffer size: the uncompressed
  // frame size plus 2KB of headroom.
  const size_t output_buffer_capacity =
      VideoFrame::AllocationSize(format, input_visible_size) + 2048;
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers,
                 client_ptr_factory_->GetWeakPtr(),
                 frame_input_count,
                 input_visible_size,
                 output_buffer_capacity));
  return true;
}

void AndroidVideoEncodeAccelerator::MaybeStartIOTimer() {
  if (!io_timer_.IsRunning() &&
      (num_buffers_at_codec_ > 0 || !pending_frames_.empty())) {
    io_timer_.Start(FROM_HERE,
                    EncodePollDelay(),
                    this,
                    &AndroidVideoEncodeAccelerator::DoIOTask);
  }
}

void AndroidVideoEncodeAccelerator::MaybeStopIOTimer() {
  if (io_timer_.IsRunning() &&
      (num_buffers_at_codec_ == 0 && pending_frames_.empty())) {
    io_timer_.Stop();
  }
}

void AndroidVideoEncodeAccelerator::Encode(
    const scoped_refptr<VideoFrame>& frame,
    bool force_keyframe) {
  DVLOG(3) << __PRETTY_FUNCTION__ << ": " << force_keyframe;
  DCHECK(thread_checker_.CalledOnValidThread());
  RETURN_ON_FAILURE(frame->format() == media::PIXEL_FORMAT_I420,
                    "Unexpected format", kInvalidArgumentError);
  RETURN_ON_FAILURE(frame->visible_rect().size() == frame_size_,
                    "Unexpected resolution", kInvalidArgumentError);
  // MediaCodec doesn't have a way to specify stride for non-Packed formats, so
  // we insist on being called with packed frames and no cropping :(
  RETURN_ON_FAILURE(frame->row_bytes(VideoFrame::kYPlane) ==
                            frame->stride(VideoFrame::kYPlane) &&
                        frame->row_bytes(VideoFrame::kUPlane) ==
                            frame->stride(VideoFrame::kUPlane) &&
                        frame->row_bytes(VideoFrame::kVPlane) ==
                            frame->stride(VideoFrame::kVPlane) &&
                        frame->coded_size() == frame->visible_rect().size(),
                    "Non-packed frame, or visible_rect != coded_size",
                    kInvalidArgumentError);

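  // Each entry carries the frame, whether a key frame was forced, and the
  // enqueue time, which QueueInput() later reports via the
  // Media.AVEA.InputQueueTime histogram.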
  pending_frames_.push(
      base::MakeTuple(frame, force_keyframe, base::Time::Now()));
  DoIOTask();
}

void AndroidVideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOG(3) << __PRETTY_FUNCTION__ << ": bitstream_buffer_id=" << buffer.id();
  DCHECK(thread_checker_.CalledOnValidThread());
  available_bitstream_buffers_.push_back(buffer);
  DoIOTask();
}

void AndroidVideoEncodeAccelerator::RequestEncodingParametersChange(
    uint32_t bitrate,
    uint32_t framerate) {
  DVLOG(3) << __PRETTY_FUNCTION__ << ": bitrate: " << bitrate
           << ", framerate: " << framerate;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (bitrate != last_set_bitrate_) {
    last_set_bitrate_ = bitrate;
    media_codec_->SetVideoBitrate(bitrate);
  }
  // Note: Android's MediaCodec doesn't allow mid-stream adjustments to
  // framerate, so we ignore that here. This is OK because Android only uses
  // the framerate value from MediaFormat during configure() as a proxy for
  // bitrate, and we set that explicitly.
}

void AndroidVideoEncodeAccelerator::Destroy() {
  DVLOG(3) << __PRETTY_FUNCTION__;
  DCHECK(thread_checker_.CalledOnValidThread());
  client_ptr_factory_.reset();
  if (media_codec_) {
    if (io_timer_.IsRunning())
      io_timer_.Stop();
    media_codec_->Stop();
  }
  delete this;
}

void AndroidVideoEncodeAccelerator::DoIOTask() {
  QueueInput();
  DequeueOutput();
  MaybeStartIOTimer();
  MaybeStopIOTimer();
}

void AndroidVideoEncodeAccelerator::QueueInput() {
  if (!client_ptr_factory_->GetWeakPtr() || pending_frames_.empty())
    return;

  int input_buf_index = 0;
  media::MediaCodecStatus status =
      media_codec_->DequeueInputBuffer(NoWaitTimeOut(), &input_buf_index);
  if (status != media::MEDIA_CODEC_OK) {
    DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
           status == media::MEDIA_CODEC_ERROR);
    RETURN_ON_FAILURE(status != media::MEDIA_CODEC_ERROR,
                      "MediaCodec error",
                      kPlatformFailureError);
    return;
  }

  const PendingFrames::value_type& input = pending_frames_.front();
  bool is_key_frame = base::get<1>(input);
  if (is_key_frame) {
    // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
    // indicate this in the QueueInputBuffer() call below and guarantee _this_
    // frame be encoded as a key frame, but sadly that flag is ignored.
    // Instead, we request a key frame "soon".
    media_codec_->RequestKeyFrameSoon();
  }
  scoped_refptr<VideoFrame> frame = base::get<0>(input);

  uint8_t* buffer = NULL;
  size_t capacity = 0;
  status = media_codec_->GetInputBuffer(input_buf_index, &buffer, &capacity);
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK, "GetInputBuffer failed.",
                    kPlatformFailureError);

  size_t queued_size =
      VideoFrame::AllocationSize(media::PIXEL_FORMAT_I420, frame->coded_size());
  RETURN_ON_FAILURE(capacity >= queued_size,
                    "Input buffer too small: " << capacity << " < "
                                               << queued_size,
                    kPlatformFailureError);

  uint8_t* dst_y = buffer;
  int dst_stride_y = frame->stride(VideoFrame::kYPlane);
  uint8_t* dst_uv =
      buffer +
      frame->stride(VideoFrame::kYPlane) * frame->rows(VideoFrame::kYPlane);
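  // NV12 interleaves U and V, so the destination chroma stride is twice the
  // I420 chroma-plane stride.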
  int dst_stride_uv = frame->stride(VideoFrame::kUPlane) * 2;
  // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR. See the comment at the
  // other mention of that constant.
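  // Note this assumes the semiplanar format was chosen above; a codec that
  // only reported COLOR_FORMAT_YUV420_PLANAR would need an I420 copy here
  // instead.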
  bool converted = !libyuv::I420ToNV12(frame->data(VideoFrame::kYPlane),
                                       frame->stride(VideoFrame::kYPlane),
                                       frame->data(VideoFrame::kUPlane),
                                       frame->stride(VideoFrame::kUPlane),
                                       frame->data(VideoFrame::kVPlane),
                                       frame->stride(VideoFrame::kVPlane),
                                       dst_y,
                                       dst_stride_y,
                                       dst_uv,
                                       dst_stride_uv,
                                       frame->coded_size().width(),
                                       frame->coded_size().height());
  RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError);

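  // MediaCodec expects monotonically increasing presentation timestamps, but
  // nothing downstream consumes them here, so a fake clock that advances one
  // microsecond per queued frame suffices.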
  fake_input_timestamp_ += base::TimeDelta::FromMicroseconds(1);
  status = media_codec_->QueueInputBuffer(
      input_buf_index, NULL, queued_size, fake_input_timestamp_);
  UMA_HISTOGRAM_TIMES("Media.AVEA.InputQueueTime",
                      base::Time::Now() - base::get<2>(input));
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                    "Failed to QueueInputBuffer: " << status,
                    kPlatformFailureError);
  ++num_buffers_at_codec_;
  pending_frames_.pop();
}

void AndroidVideoEncodeAccelerator::DequeueOutput() {
  if (!client_ptr_factory_->GetWeakPtr() ||
      available_bitstream_buffers_.empty() || num_buffers_at_codec_ == 0) {
    return;
  }

  int32_t buf_index = 0;
  size_t offset = 0;
  size_t size = 0;
  bool key_frame = false;
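  // Loop until MediaCodec hands back an actual output buffer; intermediate
  // statuses such as OUTPUT_BUFFERS_CHANGED leave buf_index negative.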
  do {
    media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
        NoWaitTimeOut(), &buf_index, &offset, &size, NULL, NULL, &key_frame);
    switch (status) {
      case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
        return;

      case media::MEDIA_CODEC_ERROR:
        RETURN_ON_FAILURE(false, "Codec error", kPlatformFailureError);
        // Unreachable because of previous statement, but included for clarity.
        return;

      case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED:
        RETURN_ON_FAILURE(false, "Unexpected output format change",
                          kPlatformFailureError);
        break;

      case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
        break;

      case media::MEDIA_CODEC_OK:
        DCHECK_GE(buf_index, 0);
        break;

      default:
        NOTREACHED();
        break;
    }
  } while (buf_index < 0);

  media::BitstreamBuffer bitstream_buffer = available_bitstream_buffers_.back();
  available_bitstream_buffers_.pop_back();
  std::unique_ptr<SharedMemoryRegion> shm(
      new SharedMemoryRegion(bitstream_buffer, false));
  RETURN_ON_FAILURE(shm->Map(), "Failed to map SHM", kPlatformFailureError);
  RETURN_ON_FAILURE(size <= shm->size(),
                    "Encoded buffer too large: " << size << " > "
                                                 << shm->size(),
                    kPlatformFailureError);

  media::MediaCodecStatus status = media_codec_->CopyFromOutputBuffer(
      buf_index, offset, shm->memory(), size);
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                    "CopyFromOutputBuffer failed", kPlatformFailureError);
  media_codec_->ReleaseOutputBuffer(buf_index, false);
  --num_buffers_at_codec_;

  UMA_HISTOGRAM_COUNTS_10000("Media.AVEA.EncodedBufferSizeKB", size / 1024);
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady,
                 client_ptr_factory_->GetWeakPtr(),
                 bitstream_buffer.id(),
                 size,
                 key_frame));
}

}  // namespace content