Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(197)

Side by Side Diff: content/common/gpu/media/android_video_encode_accelerator.cc

Issue 74563002: AndroidVideoEncodeAccelerator is born! (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Added blacklist-based disabling Created 7 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
(Empty)
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "content/common/gpu/media/android_video_encode_accelerator.h"
6
7 #include "base/bind.h"
8 #include "base/command_line.h"
9 #include "base/logging.h"
10 #include "base/message_loop/message_loop.h"
11 #include "base/metrics/histogram.h"
12 #include "content/common/gpu/gpu_channel.h"
13 #include "content/public/common/content_switches.h"
14 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
15 #include "media/base/android/media_codec_bridge.h"
16 #include "media/base/bitstream_buffer.h"
17 #include "media/base/limits.h"
18 #include "media/video/picture.h"
19 #include "third_party/libyuv/include/libyuv/convert_from.h"
20 #include "ui/gl/android/scoped_java_surface.h"
21 #include "ui/gl/gl_bindings.h"
22
23 using media::MediaCodecBridge;
24 using media::VideoCodecBridge;
25 using media::VideoFrame;
26
27 namespace content {
28
enum {
  // Subset of MediaCodecInfo.CodecCapabilities.
  // 21 == Android's MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar
  // (NV12-style layout: full Y plane followed by an interleaved UV plane);
  // QueueInput() converts incoming I420 frames to this format.
  COLOR_FORMAT_YUV420_SEMIPLANAR = 21,
};
33
// Helper macros for dealing with failure. If |result| evaluates false, emit
// |log| to DLOG(ERROR), register |error| with the client, and return.
//
// Notes:
// - Only usable in void member functions (the bare |return|).
// - After reporting |error| the client weak-ptrs are invalidated, so at most
//   one NotifyError() is ever delivered and all later client callbacks are
//   suppressed.
// - |result| is evaluated exactly once; |log| is only evaluated on failure.
#define RETURN_ON_FAILURE(result, log, error) \
  do { \
    if (!(result)) { \
      DLOG(ERROR) << log; \
      if (client_ptr_factory_.GetWeakPtr()) { \
        client_ptr_factory_.GetWeakPtr()->NotifyError(error); \
        client_ptr_factory_.InvalidateWeakPtrs(); \
      } \
      return; \
    } \
  } while (0)
47
48 static inline const base::TimeDelta EncodePollDelay() {
49 // Arbitrary choice that trades off outgoing latency against CPU utilization.
50 // Mirrors android_video_decode_accelerator.cc::DecodePollDelay().
51 return base::TimeDelta::FromMilliseconds(10);
52 }
53
54 static inline const base::TimeDelta NoWaitTimeOut() {
55 return base::TimeDelta::FromMicroseconds(0);
56 }
57
// All real setup is deferred to Initialize(); the constructor only records
// the client and zeroes the bookkeeping counters.
AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator(
    media::VideoEncodeAccelerator::Client* client)
    : client_ptr_factory_(client),
      num_buffers_at_codec_(0),
      num_output_buffers_(-1),  // Unknown until Initialize() queries the codec.
      output_buffers_capacity_(0),
      last_set_bitrate_(0) {}
65
AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() {
  // Destruction happens via Destroy()'s |delete this|, which runs on the
  // creating thread; enforce that here.
  DCHECK(thread_checker_.CalledOnValidThread());
}
69
70 // static
71 std::vector<media::VideoEncodeAccelerator::SupportedProfile>
72 AndroidVideoEncodeAccelerator::GetSupportedProfiles() {
73 std::vector<MediaCodecBridge::CodecsInfo> codecs_info =
74 MediaCodecBridge::GetCodecsInfo();
75
76 std::vector<SupportedProfile> profiles;
77
78 const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
79 if (cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding))
80 return profiles;
81
82 for (size_t i = 0; i < codecs_info.size(); ++i) {
83 const MediaCodecBridge::CodecsInfo& info = codecs_info[i];
84 if (!info.is_encoder || info.codecs != "vp8" ||
85 VideoCodecBridge::IsKnownUnaccelerated(media::kCodecVP8, true)) {
86 // We're only looking for a HW VP8 encoder.
87 continue;
88 }
89 SupportedProfile profile;
90 profile.profile = media::VP8PROFILE_MAIN;
91 // Wouldn't it be nice if MediaCodec exposed the maximum capabilities of the
92 // encoder? Sure would be. Too bad it doesn't. So we hard-code some
93 // reasonable defaults.
94 profile.max_resolution.SetSize(1920, 1088);
95 profile.max_framerate.numerator = 30;
96 profile.max_framerate.denominator = 1;
97 profiles.push_back(profile);
98 }
99 return profiles;
100 }
101
// Sets up the MediaCodec-backed encoder. Only I420 input encoded to
// VP8PROFILE_MAIN is supported, and only when MediaCodec is available and
// supports runtime parameter updates (needed by
// RequestEncodingParametersChange()). On success, posts
// NotifyInitializeDone() and RequireBitstreamBuffers() to the client; on
// failure, reports the error via RETURN_ON_FAILURE.
void AndroidVideoEncodeAccelerator::Initialize(
    VideoFrame::Format format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32 initial_bitrate) {
  DVLOG(3) << __PRETTY_FUNCTION__ << " format: " << format
           << ", input_visible_size: " << input_visible_size.ToString()
           << ", output_profile: " << output_profile
           << ", initial_bitrate: " << initial_bitrate;
  DCHECK(!media_codec_);
  DCHECK(thread_checker_.CalledOnValidThread());

  RETURN_ON_FAILURE(media::MediaCodecBridge::IsAvailable() &&
                    media::MediaCodecBridge::SupportsSetParameters() &&
                    format == VideoFrame::I420 &&
                    output_profile == media::VP8PROFILE_MAIN,
                    "Unexpected combo: " << format << ", " << output_profile,
                    kInvalidArgumentError);

  last_set_bitrate_ = initial_bitrate;

  // Only consider using MediaCodec if it's likely backed by hardware.
  RETURN_ON_FAILURE(
      !media::VideoCodecBridge::IsKnownUnaccelerated(media::kCodecVP8, true),
      "No HW support",
      kPlatformFailureError);

  // TODO(fischman): when there is more HW out there with different color-space
  // support, this should turn into a negotiation with the codec for supported
  // formats. For now we use the only format supported by the only available
  // HW.
  media_codec_.reset(
      media::VideoCodecBridge::CreateEncoder(media::kCodecVP8,
                                             input_visible_size,
                                             initial_bitrate,
                                             INITIAL_FRAMERATE,
                                             IFRAME_INTERVAL,
                                             COLOR_FORMAT_YUV420_SEMIPLANAR));

  RETURN_ON_FAILURE(
      media_codec_,
      "Failed to create/start the codec: " << input_visible_size.ToString(),
      kPlatformFailureError);

  // Completion is reported asynchronously (posted, not called directly) so
  // the client is never re-entered from its own Initialize() call.
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::NotifyInitializeDone,
                 client_ptr_factory_.GetWeakPtr()));

  // Cache the codec's output-buffer geometry, then ask the client for
  // bitstream buffers at least that large (DoOutputBuffersSuffice() later
  // verifies the codec never needs more than what we request here).
  num_output_buffers_ = media_codec_->GetOutputBuffersCount();
  output_buffers_capacity_ = media_codec_->GetOutputBuffersCapacity();
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers,
                 client_ptr_factory_.GetWeakPtr(),
                 num_output_buffers_,
                 input_visible_size,
                 output_buffers_capacity_));
}
161
162 void AndroidVideoEncodeAccelerator::MaybeStartIOTimer() {
163 if (!io_timer_.IsRunning() &&
164 (num_buffers_at_codec_ > 0 || !pending_frames_.empty())) {
165 io_timer_.Start(FROM_HERE,
166 EncodePollDelay(),
167 this,
168 &AndroidVideoEncodeAccelerator::DoIOTask);
169 }
170 }
171
172 void AndroidVideoEncodeAccelerator::MaybeStopIOTimer() {
173 if (io_timer_.IsRunning() &&
174 (num_buffers_at_codec_ == 0 && pending_frames_.empty())) {
175 io_timer_.Stop();
176 }
177 }
178
// Validates |frame| (I420, packed planes, no cropping), enqueues it together
// with the keyframe request and its arrival time, and kicks DoIOTask() to
// make immediate progress.
void AndroidVideoEncodeAccelerator::Encode(
    const scoped_refptr<VideoFrame>& frame,
    bool force_keyframe) {
  DVLOG(3) << __PRETTY_FUNCTION__ << ": " << force_keyframe;
  DCHECK(thread_checker_.CalledOnValidThread());
  RETURN_ON_FAILURE(frame->format() == VideoFrame::I420,
                    "Unexpected format",
                    kInvalidArgumentError);

  // MediaCodec doesn't have a way to specify stride for non-Packed formats, so
  // we insist on being called with packed frames and no cropping :(
  RETURN_ON_FAILURE(frame->row_bytes(VideoFrame::kYPlane) ==
                    frame->stride(VideoFrame::kYPlane) &&
                    frame->row_bytes(VideoFrame::kUPlane) ==
                    frame->stride(VideoFrame::kUPlane) &&
                    frame->row_bytes(VideoFrame::kVPlane) ==
                    frame->stride(VideoFrame::kVPlane) &&
                    gfx::Rect(frame->coded_size()) == frame->visible_rect(),
                    "Non-packed frame, or visible rect != coded size",
                    kInvalidArgumentError);

  // The arrival time (tuple member .c) feeds the Media.AVEA.InputQueueTime
  // histogram when the frame is finally handed to the codec in QueueInput().
  pending_frames_.push(MakeTuple(frame, force_keyframe, base::Time::Now()));
  DoIOTask();
}
203
204 void AndroidVideoEncodeAccelerator::UseOutputBitstreamBuffer(
205 const media::BitstreamBuffer& buffer) {
206 DVLOG(3) << __PRETTY_FUNCTION__ << ": bitstream_buffer_id=" << buffer.id();
207 DCHECK(thread_checker_.CalledOnValidThread());
208 RETURN_ON_FAILURE(buffer.size() >= media_codec_->GetOutputBuffersCapacity(),
209 "Output buffers too small!",
210 kInvalidArgumentError);
211 available_bitstream_buffers_.push_back(buffer);
212 DoIOTask();
213 }
214
215 void AndroidVideoEncodeAccelerator::RequestEncodingParametersChange(
216 uint32 bitrate,
217 uint32 framerate) {
218 DVLOG(3) << __PRETTY_FUNCTION__ << ": bitrate: " << bitrate
219 << ", framerate: " << framerate;
220 DCHECK(thread_checker_.CalledOnValidThread());
221 if (bitrate != last_set_bitrate_) {
222 last_set_bitrate_ = bitrate;
223 media_codec_->SetVideoBitrate(bitrate);
224 }
225 // Note: Android's MediaCodec doesn't allow mid-stream adjustments to
226 // framerate, so we ignore that here. This is OK because Android only uses
227 // the framerate value from MediaFormat during configure() as a proxy for
228 // bitrate, and we set that explicitly.
229 }
230
// Tears down the encoder and self-deletes; callers must not touch the object
// afterwards.
void AndroidVideoEncodeAccelerator::Destroy() {
  DVLOG(3) << __PRETTY_FUNCTION__;
  DCHECK(thread_checker_.CalledOnValidThread());
  // Cut off the client first so no callback (including any error raised by
  // the teardown below) can reach it.
  client_ptr_factory_.InvalidateWeakPtrs();
  if (media_codec_) {
    if (io_timer_.IsRunning())
      io_timer_.Stop();
    media_codec_->Stop();
  }
  delete this;
}
242
// Makes one pass of progress in each direction (input then output), then
// reconciles the poll timer with whether work remains outstanding. Order
// matters: the Maybe{Start,Stop}IOTimer() calls must run after the queue
// counters have been updated by QueueInput()/DequeueOutput().
void AndroidVideoEncodeAccelerator::DoIOTask() {
  QueueInput();
  DequeueOutput();
  MaybeStartIOTimer();
  MaybeStopIOTimer();
}
249
// Feeds at most one pending frame into a free MediaCodec input buffer,
// converting it from I420 to NV12 in the process. No-op if the client has
// been invalidated (e.g. after an error), there are no pending frames, or
// the codec has no free input buffer right now.
void AndroidVideoEncodeAccelerator::QueueInput() {
  if (!client_ptr_factory_.GetWeakPtr() || pending_frames_.empty())
    return;

  int input_buf_index = 0;
  media::MediaCodecStatus status =
      media_codec_->DequeueInputBuffer(NoWaitTimeOut(), &input_buf_index);
  if (status != media::MEDIA_CODEC_OK) {
    DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
           status == media::MEDIA_CODEC_ERROR);
    RETURN_ON_FAILURE(status != media::MEDIA_CODEC_ERROR,
                      "MediaCodec error",
                      kPlatformFailureError);
    // TRY_AGAIN_LATER: no input buffer free yet; the IO timer will retry.
    return;
  }

  // Tuple layout (set in Encode()): .a = frame, .b = force_keyframe,
  // .c = enqueue time.
  const PendingFrames::value_type& input = pending_frames_.front();
  bool is_key_frame = input.b;
  if (is_key_frame) {
    // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
    // indicate this in the QueueInputBuffer() call below and guarantee _this_
    // frame be encoded as a key frame, but sadly that flag is ignored.
    // Instead, we request a key frame "soon".
    media_codec_->RequestKeyFrameSoon();
  }
  scoped_refptr<VideoFrame> frame = input.a;

  uint8* buffer = NULL;
  size_t capacity = 0;
  media_codec_->GetInputBuffer(input_buf_index, &buffer, &capacity);

  size_t queued_size =
      VideoFrame::AllocationSize(VideoFrame::I420, frame->coded_size());
  RETURN_ON_FAILURE(capacity >= queued_size,
                    "Failed to get input buffer: " << input_buf_index,
                    kPlatformFailureError);

  // Destination is laid out as NV12: a full Y plane followed by an
  // interleaved UV plane — hence the doubled chroma stride. Strides/rows can
  // be taken directly from the frame because Encode() guaranteed the planes
  // are packed.
  uint8* dst_y = buffer;
  int dst_stride_y = frame->stride(VideoFrame::kYPlane);
  uint8* dst_uv = buffer + frame->stride(VideoFrame::kYPlane) *
                  frame->rows(VideoFrame::kYPlane);
  int dst_stride_uv = frame->stride(VideoFrame::kUPlane) * 2;
  // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR. See comment at other
  // mention of that constant. Note libyuv returns 0 on success, hence the
  // negation.
  bool converted = !libyuv::I420ToNV12(frame->data(VideoFrame::kYPlane),
                                       frame->stride(VideoFrame::kYPlane),
                                       frame->data(VideoFrame::kUPlane),
                                       frame->stride(VideoFrame::kUPlane),
                                       frame->data(VideoFrame::kVPlane),
                                       frame->stride(VideoFrame::kVPlane),
                                       dst_y,
                                       dst_stride_y,
                                       dst_uv,
                                       dst_stride_uv,
                                       frame->coded_size().width(),
                                       frame->coded_size().height());
  RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError);

  // Synthetic timestamp, strictly increasing by 1us per frame — presumably
  // only needs to be monotonic for the codec since nothing downstream here
  // consumes the real frame time. TODO confirm.
  fake_input_timestamp_ += base::TimeDelta::FromMicroseconds(1);
  status = media_codec_->QueueInputBuffer(
      input_buf_index, NULL, queued_size, fake_input_timestamp_);
  // Record how long the frame sat in |pending_frames_| before reaching the
  // codec.
  UMA_HISTOGRAM_TIMES("Media.AVEA.InputQueueTime", base::Time::Now() - input.c);
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                    "Failed to QueueInputBuffer: " << status,
                    kPlatformFailureError);
  ++num_buffers_at_codec_;
  pending_frames_.pop();
}
318
// Returns true if the bitstream buffers requested during Initialize() are
// still numerous and large enough for the codec's current output-buffer
// configuration. Called from DequeueOutput() when the codec reports its
// output format/buffers changed.
bool AndroidVideoEncodeAccelerator::DoOutputBuffersSuffice() {
  // If this returns false ever, then the VEA::Client interface will need to
  // grow a DismissBitstreamBuffer() call, and VEA::Client impls will have to be
  // prepared to field multiple requests to RequireBitstreamBuffers().
  int count = media_codec_->GetOutputBuffersCount();
  size_t capacity = media_codec_->GetOutputBuffersCapacity();
  // GetOutputBuffers() is deliberately evaluated first (short-circuit) —
  // presumably it refreshes the bridge's buffer references after a
  // BUFFERS_CHANGED event; confirm against MediaCodecBridge.
  bool ret = media_codec_->GetOutputBuffers() && count <= num_output_buffers_ &&
             capacity <= output_buffers_capacity_;
  LOG_IF(ERROR, !ret) << "Need more/bigger buffers; before: "
                      << num_output_buffers_ << "x" << output_buffers_capacity_
                      << ", now: " << count << "x" << capacity;
  UMA_HISTOGRAM_BOOLEAN("Media.AVEA.OutputBuffersSuffice", ret);
  return ret;
}
333
// Dequeues at most one encoded buffer from the codec, copies it into a
// client-provided bitstream buffer, and posts BitstreamBufferReady() to the
// client. No-op when the client is gone, no bitstream buffers are available,
// or nothing is outstanding at the codec.
void AndroidVideoEncodeAccelerator::DequeueOutput() {
  if (!client_ptr_factory_.GetWeakPtr() ||
      available_bitstream_buffers_.empty() || num_buffers_at_codec_ == 0) {
    return;
  }

  int32 buf_index = 0;
  size_t offset = 0;
  size_t size = 0;
  bool key_frame = false;
  // Spin until a real output buffer (buf_index >= 0) arrives, absorbing any
  // format/buffers-changed notifications along the way; bail out entirely on
  // TRY_AGAIN_LATER or error.
  do {
    media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
        NoWaitTimeOut(), &buf_index, &offset, &size, NULL, NULL, &key_frame);
    switch (status) {
      case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
        return;

      case media::MEDIA_CODEC_ERROR:
        RETURN_ON_FAILURE(false, "Codec error", kPlatformFailureError);
        // Unreachable because of previous statement, but included for clarity.
        return;

      case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED:  // Fall-through.
      case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
        // Fatal if the client's existing bitstream buffers can no longer hold
        // the codec's output (see DoOutputBuffersSuffice()).
        RETURN_ON_FAILURE(DoOutputBuffersSuffice(),
                          "Bitstream now requires more/larger buffers",
                          kPlatformFailureError);
        break;

      case media::MEDIA_CODEC_OK:
        DCHECK_GE(buf_index, 0);
        break;

      default:
        NOTREACHED();
        break;
    }
  } while (buf_index < 0);

  // Hand the payload to the most recently provided buffer (LIFO) via the
  // shared-memory region backing it.
  media::BitstreamBuffer bitstream_buffer = available_bitstream_buffers_.back();
  available_bitstream_buffers_.pop_back();
  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(bitstream_buffer.handle(), false));
  RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
                    "Failed to map SHM",
                    kPlatformFailureError);
  RETURN_ON_FAILURE(size <= shm->mapped_size(),
                    "Encoded buffer too large: " << size << ">"
                    << shm->mapped_size(),
                    kPlatformFailureError);

  media_codec_->CopyFromOutputBuffer(buf_index, offset, shm->memory(), size);
  media_codec_->ReleaseOutputBuffer(buf_index, false);
  --num_buffers_at_codec_;

  UMA_HISTOGRAM_COUNTS_10000("Media.AVEA.EncodedBufferSizeKB", size / 1024);
  // Posted (not called directly) so the client is never re-entered from its
  // own call stack.
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady,
                 client_ptr_factory_.GetWeakPtr(),
                 bitstream_buffer.id(),
                 size,
                 key_frame));
}
398
399 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698