| OLD | NEW |
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/common/gpu/media/android_video_encode_accelerator.h" | 5 #include "media/gpu/android_video_encode_accelerator.h" |
| 6 | 6 |
| 7 #include <memory> | 7 #include <memory> |
| 8 #include <set> | 8 #include <set> |
| 9 | 9 |
| 10 #include "base/bind.h" | 10 #include "base/bind.h" |
| 11 #include "base/logging.h" | 11 #include "base/logging.h" |
| 12 #include "base/message_loop/message_loop.h" | 12 #include "base/message_loop/message_loop.h" |
| 13 #include "base/metrics/histogram.h" | 13 #include "base/metrics/histogram.h" |
| 14 #include "content/common/gpu/media/shared_memory_region.h" | |
| 15 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" | 14 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" |
| 16 #include "gpu/ipc/service/gpu_channel.h" | 15 #include "gpu/ipc/service/gpu_channel.h" |
| 17 #include "media/base/android/media_codec_util.h" | 16 #include "media/base/android/media_codec_util.h" |
| 18 #include "media/base/bitstream_buffer.h" | 17 #include "media/base/bitstream_buffer.h" |
| 19 #include "media/base/limits.h" | 18 #include "media/base/limits.h" |
| 19 #include "media/gpu/shared_memory_region.h" |
| 20 #include "media/video/picture.h" | 20 #include "media/video/picture.h" |
| 21 #include "third_party/libyuv/include/libyuv/convert_from.h" | 21 #include "third_party/libyuv/include/libyuv/convert_from.h" |
| 22 #include "ui/gl/android/scoped_java_surface.h" | 22 #include "ui/gl/android/scoped_java_surface.h" |
| 23 #include "ui/gl/gl_bindings.h" | 23 #include "ui/gl/gl_bindings.h" |
| 24 | 24 |
| 25 using media::VideoCodecBridge; | 25 using media::VideoCodecBridge; |
| 26 using media::VideoFrame; | 26 using media::VideoFrame; |
| 27 | 27 |
| 28 namespace content { | 28 namespace media { |
| 29 | 29 |
| 30 // Limit default max video codec size for Android to avoid | 30 // Limit default max video codec size for Android to avoid |
| 31 // HW codec initialization failure for resolution higher than 720p. | 31 // HW codec initialization failure for resolution higher than 720p. |
| 32 // Default values are from Libjingle "jsepsessiondescription.cc". | 32 // Default values are from Libjingle "jsepsessiondescription.cc". |
| 33 const int kMaxEncodeFrameWidth = 1280; | 33 const int kMaxEncodeFrameWidth = 1280; |
| 34 const int kMaxEncodeFrameHeight = 720; | 34 const int kMaxEncodeFrameHeight = 720; |
| 35 const int kMaxFramerateNumerator = 30; | 35 const int kMaxFramerateNumerator = 30; |
| 36 const int kMaxFramerateDenominator = 1; | 36 const int kMaxFramerateDenominator = 1; |
| 37 | 37 |
| 38 enum PixelFormat { | 38 enum PixelFormat { |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 87 *pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR; | 87 *pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR; |
| 88 else if (formats.count(COLOR_FORMAT_YUV420_PLANAR) > 0) | 88 else if (formats.count(COLOR_FORMAT_YUV420_PLANAR) > 0) |
| 89 *pixel_format = COLOR_FORMAT_YUV420_PLANAR; | 89 *pixel_format = COLOR_FORMAT_YUV420_PLANAR; |
| 90 else | 90 else |
| 91 return false; | 91 return false; |
| 92 | 92 |
| 93 return true; | 93 return true; |
| 94 } | 94 } |
| 95 | 95 |
| 96 AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator() | 96 AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator() |
| 97 : num_buffers_at_codec_(0), | 97 : num_buffers_at_codec_(0), last_set_bitrate_(0) {} |
| 98 last_set_bitrate_(0) {} | |
| 99 | 98 |
| 100 AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() { | 99 AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() { |
| 101 DCHECK(thread_checker_.CalledOnValidThread()); | 100 DCHECK(thread_checker_.CalledOnValidThread()); |
| 102 } | 101 } |
| 103 | 102 |
| 104 media::VideoEncodeAccelerator::SupportedProfiles | 103 media::VideoEncodeAccelerator::SupportedProfiles |
| 105 AndroidVideoEncodeAccelerator::GetSupportedProfiles() { | 104 AndroidVideoEncodeAccelerator::GetSupportedProfiles() { |
| 106 SupportedProfiles profiles; | 105 SupportedProfiles profiles; |
| 107 | 106 |
| 108 const struct { | 107 const struct { |
| 109 const media::VideoCodec codec; | 108 const media::VideoCodec codec; |
| 110 const media::VideoCodecProfile profile; | 109 const media::VideoCodecProfile profile; |
| 111 } kSupportedCodecs[] = { | 110 } kSupportedCodecs[] = {{media::kCodecVP8, media::VP8PROFILE_ANY}, |
| 112 { media::kCodecVP8, media::VP8PROFILE_ANY }, | 111 {media::kCodecH264, media::H264PROFILE_BASELINE}, |
| 113 { media::kCodecH264, media::H264PROFILE_BASELINE }, | 112 {media::kCodecH264, media::H264PROFILE_MAIN}}; |
| 114 { media::kCodecH264, media::H264PROFILE_MAIN } | |
| 115 }; | |
| 116 | 113 |
| 117 for (const auto& supported_codec : kSupportedCodecs) { | 114 for (const auto& supported_codec : kSupportedCodecs) { |
| 118 if (supported_codec.codec == media::kCodecVP8 && | 115 if (supported_codec.codec == media::kCodecVP8 && |
| 119 !media::MediaCodecUtil::IsVp8EncoderAvailable()) { | 116 !media::MediaCodecUtil::IsVp8EncoderAvailable()) { |
| 120 continue; | 117 continue; |
| 121 } | 118 } |
| 122 | 119 |
| 123 if (VideoCodecBridge::IsKnownUnaccelerated(supported_codec.codec, | 120 if (VideoCodecBridge::IsKnownUnaccelerated(supported_codec.codec, |
| 124 media::MEDIA_CODEC_ENCODER)) { | 121 media::MEDIA_CODEC_ENCODER)) { |
| 125 continue; | 122 continue; |
| 126 } | 123 } |
| 127 | 124 |
| 128 SupportedProfile profile; | 125 SupportedProfile profile; |
| 129 profile.profile = supported_codec.profile; | 126 profile.profile = supported_codec.profile; |
| 130 // It would be nice if MediaCodec exposes the maximum capabilities of | 127 // It would be nice if MediaCodec exposes the maximum capabilities of |
| 131 // the encoder. Hard-code some reasonable defaults as workaround. | 128 // the encoder. Hard-code some reasonable defaults as workaround. |
| 132 profile.max_resolution.SetSize(kMaxEncodeFrameWidth, | 129 profile.max_resolution.SetSize(kMaxEncodeFrameWidth, kMaxEncodeFrameHeight); |
| 133 kMaxEncodeFrameHeight); | |
| 134 profile.max_framerate_numerator = kMaxFramerateNumerator; | 130 profile.max_framerate_numerator = kMaxFramerateNumerator; |
| 135 profile.max_framerate_denominator = kMaxFramerateDenominator; | 131 profile.max_framerate_denominator = kMaxFramerateDenominator; |
| 136 profiles.push_back(profile); | 132 profiles.push_back(profile); |
| 137 } | 133 } |
| 138 return profiles; | 134 return profiles; |
| 139 } | 135 } |
| 140 | 136 |
| 141 bool AndroidVideoEncodeAccelerator::Initialize( | 137 bool AndroidVideoEncodeAccelerator::Initialize( |
| 142 media::VideoPixelFormat format, | 138 media::VideoPixelFormat format, |
| 143 const gfx::Size& input_visible_size, | 139 const gfx::Size& input_visible_size, |
| (...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 186 codec, media::MEDIA_CODEC_ENCODER)) { | 182 codec, media::MEDIA_CODEC_ENCODER)) { |
| 187 DLOG(ERROR) << "No HW support"; | 183 DLOG(ERROR) << "No HW support"; |
| 188 return false; | 184 return false; |
| 189 } | 185 } |
| 190 | 186 |
| 191 PixelFormat pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR; | 187 PixelFormat pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR; |
| 192 if (!GetSupportedColorFormatForMime(mime_type, &pixel_format)) { | 188 if (!GetSupportedColorFormatForMime(mime_type, &pixel_format)) { |
| 193 DLOG(ERROR) << "No color format support."; | 189 DLOG(ERROR) << "No color format support."; |
| 194 return false; | 190 return false; |
| 195 } | 191 } |
| 196 media_codec_.reset(media::VideoCodecBridge::CreateEncoder(codec, | 192 media_codec_.reset(media::VideoCodecBridge::CreateEncoder( |
| 197 input_visible_size, | 193 codec, input_visible_size, initial_bitrate, INITIAL_FRAMERATE, |
| 198 initial_bitrate, | 194 IFRAME_INTERVAL, pixel_format)); |
| 199 INITIAL_FRAMERATE, | |
| 200 IFRAME_INTERVAL, | |
| 201 pixel_format)); | |
| 202 | 195 |
| 203 if (!media_codec_) { | 196 if (!media_codec_) { |
| 204 DLOG(ERROR) << "Failed to create/start the codec: " | 197 DLOG(ERROR) << "Failed to create/start the codec: " |
| 205 << input_visible_size.ToString(); | 198 << input_visible_size.ToString(); |
| 206 return false; | 199 return false; |
| 207 } | 200 } |
| 208 | 201 |
| 209 // Conservative upper bound for output buffer size: decoded size + 2KB. | 202 // Conservative upper bound for output buffer size: decoded size + 2KB. |
| 210 const size_t output_buffer_capacity = | 203 const size_t output_buffer_capacity = |
| 211 VideoFrame::AllocationSize(format, input_visible_size) + 2048; | 204 VideoFrame::AllocationSize(format, input_visible_size) + 2048; |
| 212 base::MessageLoop::current()->PostTask( | 205 base::MessageLoop::current()->PostTask( |
| 213 FROM_HERE, | 206 FROM_HERE, |
| 214 base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers, | 207 base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers, |
| 215 client_ptr_factory_->GetWeakPtr(), | 208 client_ptr_factory_->GetWeakPtr(), frame_input_count, |
| 216 frame_input_count, | 209 input_visible_size, output_buffer_capacity)); |
| 217 input_visible_size, | |
| 218 output_buffer_capacity)); | |
| 219 return true; | 210 return true; |
| 220 } | 211 } |
| 221 | 212 |
| 222 void AndroidVideoEncodeAccelerator::MaybeStartIOTimer() { | 213 void AndroidVideoEncodeAccelerator::MaybeStartIOTimer() { |
| 223 if (!io_timer_.IsRunning() && | 214 if (!io_timer_.IsRunning() && |
| 224 (num_buffers_at_codec_ > 0 || !pending_frames_.empty())) { | 215 (num_buffers_at_codec_ > 0 || !pending_frames_.empty())) { |
| 225 io_timer_.Start(FROM_HERE, | 216 io_timer_.Start(FROM_HERE, EncodePollDelay(), this, |
| 226 EncodePollDelay(), | |
| 227 this, | |
| 228 &AndroidVideoEncodeAccelerator::DoIOTask); | 217 &AndroidVideoEncodeAccelerator::DoIOTask); |
| 229 } | 218 } |
| 230 } | 219 } |
| 231 | 220 |
| 232 void AndroidVideoEncodeAccelerator::MaybeStopIOTimer() { | 221 void AndroidVideoEncodeAccelerator::MaybeStopIOTimer() { |
| 233 if (io_timer_.IsRunning() && | 222 if (io_timer_.IsRunning() && |
| 234 (num_buffers_at_codec_ == 0 && pending_frames_.empty())) { | 223 (num_buffers_at_codec_ == 0 && pending_frames_.empty())) { |
| 235 io_timer_.Stop(); | 224 io_timer_.Stop(); |
| 236 } | 225 } |
| 237 } | 226 } |
| (...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 308 void AndroidVideoEncodeAccelerator::QueueInput() { | 297 void AndroidVideoEncodeAccelerator::QueueInput() { |
| 309 if (!client_ptr_factory_->GetWeakPtr() || pending_frames_.empty()) | 298 if (!client_ptr_factory_->GetWeakPtr() || pending_frames_.empty()) |
| 310 return; | 299 return; |
| 311 | 300 |
| 312 int input_buf_index = 0; | 301 int input_buf_index = 0; |
| 313 media::MediaCodecStatus status = | 302 media::MediaCodecStatus status = |
| 314 media_codec_->DequeueInputBuffer(NoWaitTimeOut(), &input_buf_index); | 303 media_codec_->DequeueInputBuffer(NoWaitTimeOut(), &input_buf_index); |
| 315 if (status != media::MEDIA_CODEC_OK) { | 304 if (status != media::MEDIA_CODEC_OK) { |
| 316 DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER || | 305 DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER || |
| 317 status == media::MEDIA_CODEC_ERROR); | 306 status == media::MEDIA_CODEC_ERROR); |
| 318 RETURN_ON_FAILURE(status != media::MEDIA_CODEC_ERROR, | 307 RETURN_ON_FAILURE(status != media::MEDIA_CODEC_ERROR, "MediaCodec error", |
| 319 "MediaCodec error", | |
| 320 kPlatformFailureError); | 308 kPlatformFailureError); |
| 321 return; | 309 return; |
| 322 } | 310 } |
| 323 | 311 |
| 324 const PendingFrames::value_type& input = pending_frames_.front(); | 312 const PendingFrames::value_type& input = pending_frames_.front(); |
| 325 bool is_key_frame = base::get<1>(input); | 313 bool is_key_frame = base::get<1>(input); |
| 326 if (is_key_frame) { | 314 if (is_key_frame) { |
| 327 // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could | 315 // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could |
| 328 // indicate this in the QueueInputBuffer() call below and guarantee _this_ | 316 // indicate this in the QueueInputBuffer() call below and guarantee _this_ |
| 329 // frame be encoded as a key frame, but sadly that flag is ignored. | 317 // frame be encoded as a key frame, but sadly that flag is ignored. |
| (...skipping 15 matching lines...) Expand all Loading... |
| 345 kPlatformFailureError); | 333 kPlatformFailureError); |
| 346 | 334 |
| 347 uint8_t* dst_y = buffer; | 335 uint8_t* dst_y = buffer; |
| 348 int dst_stride_y = frame->stride(VideoFrame::kYPlane); | 336 int dst_stride_y = frame->stride(VideoFrame::kYPlane); |
| 349 uint8_t* dst_uv = | 337 uint8_t* dst_uv = |
| 350 buffer + | 338 buffer + |
| 351 frame->stride(VideoFrame::kYPlane) * frame->rows(VideoFrame::kYPlane); | 339 frame->stride(VideoFrame::kYPlane) * frame->rows(VideoFrame::kYPlane); |
| 352 int dst_stride_uv = frame->stride(VideoFrame::kUPlane) * 2; | 340 int dst_stride_uv = frame->stride(VideoFrame::kUPlane) * 2; |
| 353 // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR. See comment at other | 341 // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR. See comment at other |
| 354 // mention of that constant. | 342 // mention of that constant. |
| 355 bool converted = !libyuv::I420ToNV12(frame->data(VideoFrame::kYPlane), | 343 bool converted = !libyuv::I420ToNV12( |
| 356 frame->stride(VideoFrame::kYPlane), | 344 frame->data(VideoFrame::kYPlane), frame->stride(VideoFrame::kYPlane), |
| 357 frame->data(VideoFrame::kUPlane), | 345 frame->data(VideoFrame::kUPlane), frame->stride(VideoFrame::kUPlane), |
| 358 frame->stride(VideoFrame::kUPlane), | 346 frame->data(VideoFrame::kVPlane), frame->stride(VideoFrame::kVPlane), |
| 359 frame->data(VideoFrame::kVPlane), | 347 dst_y, dst_stride_y, dst_uv, dst_stride_uv, frame->coded_size().width(), |
| 360 frame->stride(VideoFrame::kVPlane), | 348 frame->coded_size().height()); |
| 361 dst_y, | |
| 362 dst_stride_y, | |
| 363 dst_uv, | |
| 364 dst_stride_uv, | |
| 365 frame->coded_size().width(), | |
| 366 frame->coded_size().height()); | |
| 367 RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError); | 349 RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError); |
| 368 | 350 |
| 369 fake_input_timestamp_ += base::TimeDelta::FromMicroseconds(1); | 351 fake_input_timestamp_ += base::TimeDelta::FromMicroseconds(1); |
| 370 status = media_codec_->QueueInputBuffer( | 352 status = media_codec_->QueueInputBuffer(input_buf_index, NULL, queued_size, |
| 371 input_buf_index, NULL, queued_size, fake_input_timestamp_); | 353 fake_input_timestamp_); |
| 372 UMA_HISTOGRAM_TIMES("Media.AVEA.InputQueueTime", | 354 UMA_HISTOGRAM_TIMES("Media.AVEA.InputQueueTime", |
| 373 base::Time::Now() - base::get<2>(input)); | 355 base::Time::Now() - base::get<2>(input)); |
| 374 RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK, | 356 RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK, |
| 375 "Failed to QueueInputBuffer: " << status, | 357 "Failed to QueueInputBuffer: " << status, |
| 376 kPlatformFailureError); | 358 kPlatformFailureError); |
| 377 ++num_buffers_at_codec_; | 359 ++num_buffers_at_codec_; |
| 378 pending_frames_.pop(); | 360 pending_frames_.pop(); |
| 379 } | 361 } |
| 380 | 362 |
| 381 void AndroidVideoEncodeAccelerator::DequeueOutput() { | 363 void AndroidVideoEncodeAccelerator::DequeueOutput() { |
| 382 if (!client_ptr_factory_->GetWeakPtr() || | 364 if (!client_ptr_factory_->GetWeakPtr() || |
| (...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 427 "Encoded buffer too large: " << size << ">" << shm->size(), | 409 "Encoded buffer too large: " << size << ">" << shm->size(), |
| 428 kPlatformFailureError); | 410 kPlatformFailureError); |
| 429 | 411 |
| 430 media::MediaCodecStatus status = media_codec_->CopyFromOutputBuffer( | 412 media::MediaCodecStatus status = media_codec_->CopyFromOutputBuffer( |
| 431 buf_index, offset, shm->memory(), size); | 413 buf_index, offset, shm->memory(), size); |
| 432 RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK, | 414 RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK, |
| 433 "CopyFromOutputBuffer failed", kPlatformFailureError); | 415 "CopyFromOutputBuffer failed", kPlatformFailureError); |
| 434 media_codec_->ReleaseOutputBuffer(buf_index, false); | 416 media_codec_->ReleaseOutputBuffer(buf_index, false); |
| 435 --num_buffers_at_codec_; | 417 --num_buffers_at_codec_; |
| 436 | 418 |
| 437 UMA_HISTOGRAM_COUNTS_10000("Media.AVEA.EncodedBufferSizeKB", size / 1024); | |
| 438 base::MessageLoop::current()->PostTask( | 419 base::MessageLoop::current()->PostTask( |
| 439 FROM_HERE, | 420 FROM_HERE, |
| 440 base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady, | 421 base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady, |
| 441 client_ptr_factory_->GetWeakPtr(), | 422 client_ptr_factory_->GetWeakPtr(), bitstream_buffer.id(), size, |
| 442 bitstream_buffer.id(), | |
| 443 size, | |
| 444 key_frame)); | 423 key_frame)); |
| 445 } | 424 } |
| 446 | 425 |
| 447 } // namespace content | 426 } // namespace media |
| OLD | NEW |