Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(55)

Side by Side Diff: content/common/gpu/media/android_video_encode_accelerator.cc

Issue 74563002: AndroidVideoEncodeAccelerator is born! (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: kbr comments. Created 7 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
(Empty)
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "content/common/gpu/media/android_video_encode_accelerator.h"
6
7 #include "base/bind.h"
8 #include "base/command_line.h"
9 #include "base/logging.h"
10 #include "base/message_loop/message_loop.h"
11 #include "base/metrics/histogram.h"
12 #include "content/common/gpu/gpu_channel.h"
13 #include "content/public/common/content_switches.h"
14 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
15 #include "media/base/android/media_codec_bridge.h"
16 #include "media/base/bitstream_buffer.h"
17 #include "media/base/limits.h"
18 #include "media/video/picture.h"
19 #include "third_party/libyuv/include/libyuv/convert_from.h"
20 #include "ui/gl/android/scoped_java_surface.h"
21 #include "ui/gl/gl_bindings.h"
22
23 using media::MediaCodecBridge;
24 using media::VideoCodecBridge;
25 using media::VideoFrame;
26
27 namespace content {
28
// Subset of MediaCodecInfo.CodecCapabilities color-format constants from the
// Android SDK. Value must match the platform's definition exactly since it is
// passed straight through to MediaCodec at encoder-configure time.
enum {
  // Subset of MediaCodecInfo.CodecCapabilities.
  COLOR_FORMAT_YUV420_SEMIPLANAR = 21,
};
33
// Helper macros for dealing with failure. If |result| evaluates false, emit
// |log| to DLOG(ERROR), register |error| with the client, and return from the
// enclosing (void-returning) member function. After notifying the client,
// all client weak pointers are invalidated so the client hears about at most
// one error and receives no further callbacks. Expands member names
// (|client_ptr_factory_|), so it is only usable inside methods of this class.
#define RETURN_ON_FAILURE(result, log, error)                  \
  do {                                                         \
    if (!(result)) {                                           \
      DLOG(ERROR) << log;                                      \
      if (client_ptr_factory_.GetWeakPtr()) {                  \
        client_ptr_factory_.GetWeakPtr()->NotifyError(error);  \
        client_ptr_factory_.InvalidateWeakPtrs();              \
      }                                                        \
      return;                                                  \
    }                                                          \
  } while (0)
47
48 static inline const base::TimeDelta EncodePollDelay() {
49 // Arbitrary choice that trades off outgoing latency against CPU utilization.
50 // Mirrors android_video_decode_accelerator.cc::DecodePollDelay().
51 return base::TimeDelta::FromMilliseconds(10);
52 }
53
54 static inline const base::TimeDelta NoWaitTimeOut() {
55 return base::TimeDelta::FromMicroseconds(0);
56 }
57
// Constructs the encoder in an un-initialized state; Initialize() must be
// called before any encoding work. |client| is held only via weak pointers so
// notifications stop once the factory is invalidated.
AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator(
    media::VideoEncodeAccelerator::Client* client)
    : client_ptr_factory_(client),
      num_buffers_at_codec_(0),
      num_output_buffers_(-1),  // -1: not yet known (set in Initialize()).
      output_buffers_capacity_(0),
      last_set_bitrate_(0) {}
65
// Destructor only checks thread affinity; actual teardown (codec stop, timer
// stop, weak-pointer invalidation) happens in Destroy().
AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() {
  DCHECK(thread_checker_.CalledOnValidThread());
}
69
70 // static
71 std::vector<media::VideoEncodeAccelerator::SupportedProfile>
72 AndroidVideoEncodeAccelerator::GetSupportedProfiles() {
73 std::vector<MediaCodecBridge::CodecsInfo> codecs_info =
74 MediaCodecBridge::GetCodecsInfo();
75
76 std::vector<SupportedProfile> profiles;
77
78 const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
79 if (cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding))
80 return profiles;
81
82 for (size_t i = 0; i < codecs_info.size(); ++i) {
83 const MediaCodecBridge::CodecsInfo& info = codecs_info[i];
84 if (info.direction != media::MEDIA_CODEC_ENCODER || info.codecs != "vp8" ||
85 VideoCodecBridge::IsKnownUnaccelerated(media::kCodecVP8,
86 media::MEDIA_CODEC_ENCODER)) {
87 // We're only looking for a HW VP8 encoder.
88 continue;
89 }
90 SupportedProfile profile;
91 profile.profile = media::VP8PROFILE_MAIN;
92 // Wouldn't it be nice if MediaCodec exposed the maximum capabilities of the
93 // encoder? Sure would be. Too bad it doesn't. So we hard-code some
94 // reasonable defaults.
95 profile.max_resolution.SetSize(1920, 1088);
96 profile.max_framerate.numerator = 30;
97 profile.max_framerate.denominator = 1;
98 profiles.push_back(profile);
99 }
100 return profiles;
101 }
102
// Validates the requested configuration, creates the MediaCodec-backed VP8
// encoder, and posts NotifyInitializeDone() / RequireBitstreamBuffers() to
// the client. On any failure, RETURN_ON_FAILURE reports the error to the
// client and aborts initialization.
void AndroidVideoEncodeAccelerator::Initialize(
    VideoFrame::Format format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32 initial_bitrate) {
  DVLOG(3) << __PRETTY_FUNCTION__ << " format: " << format
           << ", input_visible_size: " << input_visible_size.ToString()
           << ", output_profile: " << output_profile
           << ", initial_bitrate: " << initial_bitrate;
  DCHECK(!media_codec_);
  DCHECK(thread_checker_.CalledOnValidThread());

  // Only I420 input and VP8 output are accepted, and only on platforms whose
  // MediaCodec supports SetParameters() (needed for mid-stream bitrate
  // changes in RequestEncodingParametersChange()).
  RETURN_ON_FAILURE(media::MediaCodecBridge::IsAvailable() &&
                    media::MediaCodecBridge::SupportsSetParameters() &&
                    format == VideoFrame::I420 &&
                    output_profile == media::VP8PROFILE_MAIN,
                    "Unexpected combo: " << format << ", " << output_profile,
                    kInvalidArgumentError);

  last_set_bitrate_ = initial_bitrate;

  // Only consider using MediaCodec if it's likely backed by hardware.
  RETURN_ON_FAILURE(!media::VideoCodecBridge::IsKnownUnaccelerated(
                        media::kCodecVP8, media::MEDIA_CODEC_ENCODER),
                    "No HW support",
                    kPlatformFailureError);

  // TODO(fischman): when there is more HW out there with different color-space
  // support, this should turn into a negotiation with the codec for supported
  // formats. For now we use the only format supported by the only available
  // HW.
  media_codec_.reset(
      media::VideoCodecBridge::CreateEncoder(media::kCodecVP8,
                                             input_visible_size,
                                             initial_bitrate,
                                             INITIAL_FRAMERATE,
                                             IFRAME_INTERVAL,
                                             COLOR_FORMAT_YUV420_SEMIPLANAR));

  RETURN_ON_FAILURE(
      media_codec_,
      "Failed to create/start the codec: " << input_visible_size.ToString(),
      kPlatformFailureError);

  // Success and buffer requirements are reported asynchronously, matching the
  // VEA::Client callback contract.
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::NotifyInitializeDone,
                 client_ptr_factory_.GetWeakPtr()));

  // Cache the codec's output-buffer geometry; DoOutputBuffersSuffice() later
  // compares against these snapshots when the codec reports buffer changes.
  num_output_buffers_ = media_codec_->GetOutputBuffersCount();
  output_buffers_capacity_ = media_codec_->GetOutputBuffersCapacity();
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers,
                 client_ptr_factory_.GetWeakPtr(),
                 num_output_buffers_,
                 input_visible_size,
                 output_buffers_capacity_));
}
162
163 void AndroidVideoEncodeAccelerator::MaybeStartIOTimer() {
164 if (!io_timer_.IsRunning() &&
165 (num_buffers_at_codec_ > 0 || !pending_frames_.empty())) {
166 io_timer_.Start(FROM_HERE,
167 EncodePollDelay(),
168 this,
169 &AndroidVideoEncodeAccelerator::DoIOTask);
170 }
171 }
172
173 void AndroidVideoEncodeAccelerator::MaybeStopIOTimer() {
174 if (io_timer_.IsRunning() &&
175 (num_buffers_at_codec_ == 0 && pending_frames_.empty())) {
176 io_timer_.Stop();
177 }
178 }
179
180 void AndroidVideoEncodeAccelerator::Encode(
181 const scoped_refptr<VideoFrame>& frame,
182 bool force_keyframe) {
183 DVLOG(3) << __PRETTY_FUNCTION__ << ": " << force_keyframe;
184 DCHECK(thread_checker_.CalledOnValidThread());
185 RETURN_ON_FAILURE(frame->format() == VideoFrame::I420,
186 "Unexpected format",
187 kInvalidArgumentError);
188
189 // MediaCodec doesn't have a way to specify stride for non-Packed formats, so
190 // we insist on being called with packed frames and no cropping :(
191 RETURN_ON_FAILURE(frame->row_bytes(VideoFrame::kYPlane) ==
192 frame->stride(VideoFrame::kYPlane) &&
193 frame->row_bytes(VideoFrame::kUPlane) ==
194 frame->stride(VideoFrame::kUPlane) &&
195 frame->row_bytes(VideoFrame::kVPlane) ==
196 frame->stride(VideoFrame::kVPlane) &&
197 gfx::Rect(frame->coded_size()) == frame->visible_rect(),
198 "Non-packed frame, or visible rect != coded size",
199 kInvalidArgumentError);
200
201 pending_frames_.push(MakeTuple(frame, force_keyframe, base::Time::Now()));
202 DoIOTask();
203 }
204
// Accepts an output bitstream buffer from the client and makes it available
// for DequeueOutput() to fill with encoded data.
void AndroidVideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOG(3) << __PRETTY_FUNCTION__ << ": bitstream_buffer_id=" << buffer.id();
  DCHECK(thread_checker_.CalledOnValidThread());
  // Queries the codec's current capacity rather than the cached
  // |output_buffers_capacity_| snapshot — NOTE(review): presumably so a buffer
  // sized for a stale (smaller) capacity is rejected; confirm intent.
  RETURN_ON_FAILURE(buffer.size() >= media_codec_->GetOutputBuffersCapacity(),
                    "Output buffers too small!",
                    kInvalidArgumentError);
  available_bitstream_buffers_.push_back(buffer);
  DoIOTask();
}
215
216 void AndroidVideoEncodeAccelerator::RequestEncodingParametersChange(
217 uint32 bitrate,
218 uint32 framerate) {
219 DVLOG(3) << __PRETTY_FUNCTION__ << ": bitrate: " << bitrate
220 << ", framerate: " << framerate;
221 DCHECK(thread_checker_.CalledOnValidThread());
222 if (bitrate != last_set_bitrate_) {
223 last_set_bitrate_ = bitrate;
224 media_codec_->SetVideoBitrate(bitrate);
225 }
226 // Note: Android's MediaCodec doesn't allow mid-stream adjustments to
227 // framerate, so we ignore that here. This is OK because Android only uses
228 // the framerate value from MediaFormat during configure() as a proxy for
229 // bitrate, and we set that explicitly.
230 }
231
// Tears down the encoder and self-deletes. Weak pointers are invalidated
// first so no client callbacks fire during/after destruction; then the
// polling timer and codec are stopped before |this| is deleted.
void AndroidVideoEncodeAccelerator::Destroy() {
  DVLOG(3) << __PRETTY_FUNCTION__;
  DCHECK(thread_checker_.CalledOnValidThread());
  client_ptr_factory_.InvalidateWeakPtrs();
  if (media_codec_) {
    if (io_timer_.IsRunning())
      io_timer_.Stop();
    media_codec_->Stop();
  }
  // Self-deletion: callers must not touch |this| after Destroy() returns.
  delete this;
}
243
// One round of encoder I/O: feed pending input to the codec, drain any ready
// output, then (re)start or stop the polling timer based on the resulting
// amount of outstanding work.
void AndroidVideoEncodeAccelerator::DoIOTask() {
  QueueInput();
  DequeueOutput();
  MaybeStartIOTimer();
  MaybeStopIOTimer();
}
250
// Feeds at most one pending frame to the codec: dequeues a codec input
// buffer (non-blocking), converts the I420 frame to NV12 directly into it,
// and queues it for encoding. Returns silently when there is nothing to do
// or no input buffer is available yet.
void AndroidVideoEncodeAccelerator::QueueInput() {
  // Skip if the client has gone away (an error was reported) or there is no
  // pending work.
  if (!client_ptr_factory_.GetWeakPtr() || pending_frames_.empty())
    return;

  int input_buf_index = 0;
  media::MediaCodecStatus status =
      media_codec_->DequeueInputBuffer(NoWaitTimeOut(), &input_buf_index);
  if (status != media::MEDIA_CODEC_OK) {
    DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
           status == media::MEDIA_CODEC_ERROR);
    RETURN_ON_FAILURE(status != media::MEDIA_CODEC_ERROR,
                      "MediaCodec error",
                      kPlatformFailureError);
    // AGAIN_LATER: no input buffer free right now; the poll timer will retry.
    return;
  }

  // |input| is the (frame, force_keyframe, enqueue-time) tuple pushed by
  // Encode().
  const PendingFrames::value_type& input = pending_frames_.front();
  bool is_key_frame = input.b;
  if (is_key_frame) {
    // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
    // indicate this in the QueueInputBuffer() call below and guarantee _this_
    // frame be encoded as a key frame, but sadly that flag is ignored.
    // Instead, we request a key frame "soon".
    media_codec_->RequestKeyFrameSoon();
  }
  scoped_refptr<VideoFrame> frame = input.a;

  uint8* buffer = NULL;
  size_t capacity = 0;
  media_codec_->GetInputBuffer(input_buf_index, &buffer, &capacity);

  size_t queued_size =
      VideoFrame::AllocationSize(VideoFrame::I420, frame->coded_size());
  RETURN_ON_FAILURE(capacity >= queued_size,
                    "Failed to get input buffer: " << input_buf_index,
                    kPlatformFailureError);

  // Destination layout is NV12: Y plane followed immediately by an
  // interleaved UV plane. Source strides equal row_bytes (Encode() enforces
  // packed frames), so they double as destination strides.
  uint8* dst_y = buffer;
  int dst_stride_y = frame->stride(VideoFrame::kYPlane);
  uint8* dst_uv = buffer + frame->stride(VideoFrame::kYPlane) *
                      frame->rows(VideoFrame::kYPlane);
  int dst_stride_uv = frame->stride(VideoFrame::kUPlane) * 2;
  // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR. See comment at other
  // mention of that constant.
  bool converted = !libyuv::I420ToNV12(frame->data(VideoFrame::kYPlane),
                                       frame->stride(VideoFrame::kYPlane),
                                       frame->data(VideoFrame::kUPlane),
                                       frame->stride(VideoFrame::kUPlane),
                                       frame->data(VideoFrame::kVPlane),
                                       frame->stride(VideoFrame::kVPlane),
                                       dst_y,
                                       dst_stride_y,
                                       dst_uv,
                                       dst_stride_uv,
                                       frame->coded_size().width(),
                                       frame->coded_size().height());
  RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError);

  // A strictly-increasing synthetic timestamp is handed to the codec;
  // NOTE(review): real frame timestamps appear to be intentionally unused
  // here — confirm downstream consumers don't rely on codec PTS.
  fake_input_timestamp_ += base::TimeDelta::FromMicroseconds(1);
  status = media_codec_->QueueInputBuffer(
      input_buf_index, NULL, queued_size, fake_input_timestamp_);
  // Record how long the frame sat in |pending_frames_| before being queued.
  UMA_HISTOGRAM_TIMES("Media.AVEA.InputQueueTime", base::Time::Now() - input.c);
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                    "Failed to QueueInputBuffer: " << status,
                    kPlatformFailureError);
  ++num_buffers_at_codec_;
  pending_frames_.pop();
}
319
320 bool AndroidVideoEncodeAccelerator::DoOutputBuffersSuffice() {
321 // If this returns false ever, then the VEA::Client interface will need to
322 // grow a DismissBitstreamBuffer() call, and VEA::Client impls will have to be
323 // prepared to field multiple requests to RequireBitstreamBuffers().
324 int count = media_codec_->GetOutputBuffersCount();
325 size_t capacity = media_codec_->GetOutputBuffersCapacity();
326 bool ret = media_codec_->GetOutputBuffers() && count <= num_output_buffers_ &&
327 capacity <= output_buffers_capacity_;
328 LOG_IF(ERROR, !ret) << "Need more/bigger buffers; before: "
329 << num_output_buffers_ << "x" << output_buffers_capacity_
330 << ", now: " << count << "x" << capacity;
331 UMA_HISTOGRAM_BOOLEAN("Media.AVEA.OutputBuffersSuffice", ret);
332 return ret;
333 }
334
// Drains at most one encoded buffer from the codec into a client-provided
// bitstream buffer and posts BitstreamBufferReady() to the client. Returns
// silently when there is no client, no free client buffer, or no output
// pending at the codec.
void AndroidVideoEncodeAccelerator::DequeueOutput() {
  if (!client_ptr_factory_.GetWeakPtr() ||
      available_bitstream_buffers_.empty() || num_buffers_at_codec_ == 0) {
    return;
  }

  int32 buf_index = 0;
  size_t offset = 0;
  size_t size = 0;
  bool key_frame = false;
  // Loop until an actual output buffer index is produced; FORMAT_CHANGED /
  // BUFFERS_CHANGED statuses are metadata-only and leave buf_index < 0.
  do {
    media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
        NoWaitTimeOut(), &buf_index, &offset, &size, NULL, NULL, &key_frame);
    switch (status) {
      case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
        // Nothing ready yet; the poll timer will try again.
        return;

      case media::MEDIA_CODEC_ERROR:
        RETURN_ON_FAILURE(false, "Codec error", kPlatformFailureError);
        // Unreachable because of previous statement, but included for clarity.
        return;

      case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED:  // Fall-through.
      case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
        // Only tolerable if the buffers the client already allocated still
        // suffice; otherwise it's a fatal error (see comment in
        // DoOutputBuffersSuffice()).
        RETURN_ON_FAILURE(DoOutputBuffersSuffice(),
                          "Bitstream now requires more/larger buffers",
                          kPlatformFailureError);
        break;

      case media::MEDIA_CODEC_OK:
        DCHECK_GE(buf_index, 0);
        break;

      default:
        NOTREACHED();
        break;
    }
  } while (buf_index < 0);

  // Take a client buffer (LIFO order) and map its shared memory.
  media::BitstreamBuffer bitstream_buffer = available_bitstream_buffers_.back();
  available_bitstream_buffers_.pop_back();
  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(bitstream_buffer.handle(), false));
  RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
                    "Failed to map SHM",
                    kPlatformFailureError);
  RETURN_ON_FAILURE(size <= shm->mapped_size(),
                    "Encoded buffer too large: " << size << ">"
                    << shm->mapped_size(),
                    kPlatformFailureError);

  media_codec_->CopyFromOutputBuffer(buf_index, offset, shm->memory(), size);
  // Hand the codec's buffer back (false: don't render it to a surface).
  media_codec_->ReleaseOutputBuffer(buf_index, false);
  --num_buffers_at_codec_;

  UMA_HISTOGRAM_COUNTS_10000("Media.AVEA.EncodedBufferSizeKB", size / 1024);
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady,
                 client_ptr_factory_.GetWeakPtr(),
                 bitstream_buffer.id(),
                 size,
                 key_frame));
}
399
400 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698