Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(49)

Side by Side Diff: media/gpu/android_video_encode_accelerator.cc

Issue 2358683002: Android: enable/disable WebRTC HW H264 with a flag. (Closed)
Patch Set: expanding the existing switch flag instead of adding one more feature Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2013 The Chromium Authors. All rights reserved. 1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/gpu/android_video_encode_accelerator.h" 5 #include "media/gpu/android_video_encode_accelerator.h"
6 6
7 #include <memory> 7 #include <memory>
8 #include <set> 8 #include <set>
9 #include <tuple> 9 #include <tuple>
10 10
(...skipping 17 matching lines...) Expand all
28 namespace media { 28 namespace media {
29 29
30 // Limit default max video codec size for Android to avoid 30 // Limit default max video codec size for Android to avoid
31 // HW codec initialization failure for resolution higher than 720p. 31 // HW codec initialization failure for resolution higher than 720p.
32 // Default values are from Libjingle "jsepsessiondescription.cc". 32 // Default values are from Libjingle "jsepsessiondescription.cc".
33 const int kMaxEncodeFrameWidth = 1280; 33 const int kMaxEncodeFrameWidth = 1280;
34 const int kMaxEncodeFrameHeight = 720; 34 const int kMaxEncodeFrameHeight = 720;
35 const int kMaxFramerateNumerator = 30; 35 const int kMaxFramerateNumerator = 30;
36 const int kMaxFramerateDenominator = 1; 36 const int kMaxFramerateDenominator = 1;
37 37
38 const int64_t kNumMicrosecsPerSec = INT64_C(1000000);
DaleCurtis 2016/09/29 00:31:36 INT64_C necessary?
braveyao 2016/09/29 19:26:26 Done.
39
38 enum PixelFormat { 40 enum PixelFormat {
39 // Subset of MediaCodecInfo.CodecCapabilities. 41 // Subset of MediaCodecInfo.CodecCapabilities.
40 COLOR_FORMAT_YUV420_PLANAR = 19, 42 COLOR_FORMAT_YUV420_PLANAR = 19,
41 COLOR_FORMAT_YUV420_SEMIPLANAR = 21, 43 COLOR_FORMAT_YUV420_SEMIPLANAR = 21,
42 }; 44 };
43 45
44 // Helper macros for dealing with failure. If |result| evaluates false, emit 46 // Helper macros for dealing with failure. If |result| evaluates false, emit
45 // |log| to DLOG(ERROR), register |error| with the client, and return. 47 // |log| to DLOG(ERROR), register |error| with the client, and return.
46 #define RETURN_ON_FAILURE(result, log, error) \ 48 #define RETURN_ON_FAILURE(result, log, error) \
47 do { \ 49 do { \
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after
101 } 103 }
102 104
103 VideoEncodeAccelerator::SupportedProfiles 105 VideoEncodeAccelerator::SupportedProfiles
104 AndroidVideoEncodeAccelerator::GetSupportedProfiles() { 106 AndroidVideoEncodeAccelerator::GetSupportedProfiles() {
105 SupportedProfiles profiles; 107 SupportedProfiles profiles;
106 108
107 const struct { 109 const struct {
108 const VideoCodec codec; 110 const VideoCodec codec;
109 const VideoCodecProfile profile; 111 const VideoCodecProfile profile;
110 } kSupportedCodecs[] = {{kCodecVP8, VP8PROFILE_ANY}, 112 } kSupportedCodecs[] = {{kCodecVP8, VP8PROFILE_ANY},
111 {kCodecH264, H264PROFILE_BASELINE}, 113 {kCodecH264, H264PROFILE_BASELINE}};
112 {kCodecH264, H264PROFILE_MAIN}};
113 114
114 for (const auto& supported_codec : kSupportedCodecs) { 115 for (const auto& supported_codec : kSupportedCodecs) {
115 if (supported_codec.codec == kCodecVP8 && 116 if (supported_codec.codec == kCodecVP8 &&
116 !MediaCodecUtil::IsVp8EncoderAvailable()) { 117 !MediaCodecUtil::IsVp8EncoderAvailable()) {
117 continue; 118 continue;
118 } 119 }
119 120
121 if (supported_codec.codec == kCodecH264 &&
122 !MediaCodecUtil::IsH264EncoderAvailable()) {
123 continue;
124 }
125
120 if (VideoCodecBridge::IsKnownUnaccelerated(supported_codec.codec, 126 if (VideoCodecBridge::IsKnownUnaccelerated(supported_codec.codec,
121 MEDIA_CODEC_ENCODER)) { 127 MEDIA_CODEC_ENCODER)) {
122 continue; 128 continue;
123 } 129 }
124 130
125 SupportedProfile profile; 131 SupportedProfile profile;
126 profile.profile = supported_codec.profile; 132 profile.profile = supported_codec.profile;
127 // It would be nice if MediaCodec exposes the maximum capabilities of 133 // It would be nice if MediaCodec exposes the maximum capabilities of
128 // the encoder. Hard-code some reasonable defaults as workaround. 134 // the encoder. Hard-code some reasonable defaults as workaround.
129 profile.max_resolution.SetSize(kMaxEncodeFrameWidth, kMaxEncodeFrameHeight); 135 profile.max_resolution.SetSize(kMaxEncodeFrameWidth, kMaxEncodeFrameHeight);
(...skipping 24 matching lines...) Expand all
154 DLOG(ERROR) << "Unexpected combo: " << format << ", " << output_profile; 160 DLOG(ERROR) << "Unexpected combo: " << format << ", " << output_profile;
155 return false; 161 return false;
156 } 162 }
157 163
158 std::string mime_type; 164 std::string mime_type;
159 VideoCodec codec; 165 VideoCodec codec;
160 // The client should be prepared to feed at least this many frames into the 166 // The client should be prepared to feed at least this many frames into the
161 // encoder before being returned any output frames, since the encoder may 167 // encoder before being returned any output frames, since the encoder may
162 // need to hold onto some subset of inputs as reference pictures. 168 // need to hold onto some subset of inputs as reference pictures.
163 uint32_t frame_input_count; 169 uint32_t frame_input_count;
170 uint32_t i_frame_interval;
164 if (output_profile == VP8PROFILE_ANY) { 171 if (output_profile == VP8PROFILE_ANY) {
165 codec = kCodecVP8; 172 codec = kCodecVP8;
166 mime_type = "video/x-vnd.on2.vp8"; 173 mime_type = "video/x-vnd.on2.vp8";
167 frame_input_count = 1; 174 frame_input_count = 1;
175 i_frame_interval = IFRAME_INTERVAL_VPX;
168 } else if (output_profile == H264PROFILE_BASELINE || 176 } else if (output_profile == H264PROFILE_BASELINE ||
169 output_profile == H264PROFILE_MAIN) { 177 output_profile == H264PROFILE_MAIN) {
170 codec = kCodecH264; 178 codec = kCodecH264;
171 mime_type = "video/avc"; 179 mime_type = "video/avc";
172 frame_input_count = 30; 180 frame_input_count = 30;
181 i_frame_interval = IFRAME_INTERVAL_H264;
173 } else { 182 } else {
174 return false; 183 return false;
175 } 184 }
176 185
177 frame_size_ = input_visible_size; 186 frame_size_ = input_visible_size;
178 last_set_bitrate_ = initial_bitrate; 187 last_set_bitrate_ = initial_bitrate;
179 188
180 // Only consider using MediaCodec if it's likely backed by hardware. 189 // Only consider using MediaCodec if it's likely backed by hardware.
181 if (VideoCodecBridge::IsKnownUnaccelerated(codec, MEDIA_CODEC_ENCODER)) { 190 if (VideoCodecBridge::IsKnownUnaccelerated(codec, MEDIA_CODEC_ENCODER)) {
182 DLOG(ERROR) << "No HW support"; 191 DLOG(ERROR) << "No HW support";
183 return false; 192 return false;
184 } 193 }
185 194
186 PixelFormat pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR; 195 PixelFormat pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR;
187 if (!GetSupportedColorFormatForMime(mime_type, &pixel_format)) { 196 if (!GetSupportedColorFormatForMime(mime_type, &pixel_format)) {
188 DLOG(ERROR) << "No color format support."; 197 DLOG(ERROR) << "No color format support.";
189 return false; 198 return false;
190 } 199 }
191 media_codec_.reset(VideoCodecBridge::CreateEncoder( 200 media_codec_.reset(VideoCodecBridge::CreateEncoder(
192 codec, input_visible_size, initial_bitrate, INITIAL_FRAMERATE, 201 codec, input_visible_size, initial_bitrate, INITIAL_FRAMERATE,
193 IFRAME_INTERVAL, pixel_format)); 202 i_frame_interval, pixel_format));
194 203
195 if (!media_codec_) { 204 if (!media_codec_) {
196 DLOG(ERROR) << "Failed to create/start the codec: " 205 DLOG(ERROR) << "Failed to create/start the codec: "
197 << input_visible_size.ToString(); 206 << input_visible_size.ToString();
198 return false; 207 return false;
199 } 208 }
200 209
201 // Conservative upper bound for output buffer size: decoded size + 2KB. 210 // Conservative upper bound for output buffer size: decoded size + 2KB.
202 const size_t output_buffer_capacity = 211 const size_t output_buffer_capacity =
203 VideoFrame::AllocationSize(format, input_visible_size) + 2048; 212 VideoFrame::AllocationSize(format, input_visible_size) + 2048;
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after
340 // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR. See comment at other 349 // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR. See comment at other
341 // mention of that constant. 350 // mention of that constant.
342 bool converted = !libyuv::I420ToNV12( 351 bool converted = !libyuv::I420ToNV12(
343 frame->data(VideoFrame::kYPlane), frame->stride(VideoFrame::kYPlane), 352 frame->data(VideoFrame::kYPlane), frame->stride(VideoFrame::kYPlane),
344 frame->data(VideoFrame::kUPlane), frame->stride(VideoFrame::kUPlane), 353 frame->data(VideoFrame::kUPlane), frame->stride(VideoFrame::kUPlane),
345 frame->data(VideoFrame::kVPlane), frame->stride(VideoFrame::kVPlane), 354 frame->data(VideoFrame::kVPlane), frame->stride(VideoFrame::kVPlane),
346 dst_y, dst_stride_y, dst_uv, dst_stride_uv, frame->coded_size().width(), 355 dst_y, dst_stride_y, dst_uv, dst_stride_uv, frame->coded_size().width(),
347 frame->coded_size().height()); 356 frame->coded_size().height());
348 RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError); 357 RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError);
349 358
350 fake_input_timestamp_ += base::TimeDelta::FromMicroseconds(1); 359 input_timestamp_ += base::TimeDelta::FromMicroseconds(kNumMicrosecsPerSec /
360 INITIAL_FRAMERATE);
351 status = media_codec_->QueueInputBuffer(input_buf_index, nullptr, queued_size, 361 status = media_codec_->QueueInputBuffer(input_buf_index, nullptr, queued_size,
352 fake_input_timestamp_); 362 input_timestamp_);
353 UMA_HISTOGRAM_TIMES("Media.AVDA.InputQueueTime", 363 UMA_HISTOGRAM_TIMES("Media.AVDA.InputQueueTime",
354 base::Time::Now() - std::get<2>(input)); 364 base::Time::Now() - std::get<2>(input));
355 RETURN_ON_FAILURE(status == MEDIA_CODEC_OK, 365 RETURN_ON_FAILURE(status == MEDIA_CODEC_OK,
356 "Failed to QueueInputBuffer: " << status, 366 "Failed to QueueInputBuffer: " << status,
357 kPlatformFailureError); 367 kPlatformFailureError);
358 ++num_buffers_at_codec_; 368 ++num_buffers_at_codec_;
359 pending_frames_.pop(); 369 pending_frames_.pop();
360 } 370 }
361 371
362 void AndroidVideoEncodeAccelerator::DequeueOutput() { 372 void AndroidVideoEncodeAccelerator::DequeueOutput() {
(...skipping 13 matching lines...) Expand all
376 switch (status) { 386 switch (status) {
377 case MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER: 387 case MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
378 return; 388 return;
379 389
380 case MEDIA_CODEC_ERROR: 390 case MEDIA_CODEC_ERROR:
381 RETURN_ON_FAILURE(false, "Codec error", kPlatformFailureError); 391 RETURN_ON_FAILURE(false, "Codec error", kPlatformFailureError);
382 // Unreachable because of previous statement, but included for clarity. 392 // Unreachable because of previous statement, but included for clarity.
383 return; 393 return;
384 394
385 case MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: 395 case MEDIA_CODEC_OUTPUT_FORMAT_CHANGED:
386 break; 396 return;
387 397
388 case MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED: 398 case MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
389 break; 399 return;
390 400
391 case MEDIA_CODEC_OK: 401 case MEDIA_CODEC_OK:
392 DCHECK_GE(buf_index, 0); 402 DCHECK_GE(buf_index, 0);
393 break; 403 break;
394 404
395 default: 405 default:
396 NOTREACHED(); 406 NOTREACHED();
397 break; 407 break;
398 } 408 }
399 } while (buf_index < 0); 409 } while (buf_index < 0);
(...skipping 15 matching lines...) Expand all
415 --num_buffers_at_codec_; 425 --num_buffers_at_codec_;
416 426
417 base::ThreadTaskRunnerHandle::Get()->PostTask( 427 base::ThreadTaskRunnerHandle::Get()->PostTask(
418 FROM_HERE, 428 FROM_HERE,
419 base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady, 429 base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady,
420 client_ptr_factory_->GetWeakPtr(), bitstream_buffer.id(), size, 430 client_ptr_factory_->GetWeakPtr(), bitstream_buffer.id(), size,
421 key_frame, base::Time::Now() - base::Time())); 431 key_frame, base::Time::Now() - base::Time()));
422 } 432 }
423 433
424 } // namespace media 434 } // namespace media
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698