Chromium Code Reviews

Side by Side Diff: content/common/gpu/media/android_video_decode_accelerator.cc

Issue 1313913003: Begin refactor of AVDA to support zero-copy. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: refactored into composable pieces, to see if it looks nicer. Created 5 years, 3 months ago
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "content/common/gpu/media/android_video_decode_accelerator.h"
6
7 #include "base/bind.h"
8 #include "base/logging.h"
9 #include "base/message_loop/message_loop.h"
10 #include "base/metrics/histogram.h"
11 #include "content/common/gpu/gpu_channel.h"
12 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
13 #include "media/base/bitstream_buffer.h"
14 #include "media/base/limits.h"
15 #include "media/base/video_decoder_config.h"
16 #include "media/video/picture.h"
17 #include "ui/gl/android/scoped_java_surface.h"
18 #include "ui/gl/android/surface_texture.h"
19 #include "ui/gl/gl_bindings.h"
20
21 namespace content {
22
23 // Helper macro for dealing with failure. If |result| evaluates to false, emit
24 // |log| to ERROR, register |error| with the decoder, and return.
25 #define RETURN_ON_FAILURE(result, log, error) \
26 do { \
27 if (!(result)) { \
28 DLOG(ERROR) << log; \
29 base::MessageLoop::current()->PostTask( \
30 FROM_HERE, \
31 base::Bind(&AndroidVideoDecodeAccelerator::NotifyError, \
32 weak_this_factory_.GetWeakPtr(), \
33 error)); \
34 state_ = ERROR; \
35 return; \
36 } \
37 } while (0)
38
39 // TODO(dwkang): We only need kMaxVideoFrames to pass the media stack's
40 // prerolling phase, but 1 is added due to crbug.com/176036. This should be
41 // tuned when we have an actual use case.
42 enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 };
43
44 // Max number of bitstreams notified to the client with
45 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream.
46 enum { kMaxBitstreamsNotifiedInAdvance = 32 };
47
48 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID)
49 // MediaCodec is only guaranteed to support baseline, but some devices may
50 // support others. Advertise support for all H264 profiles and let the
51 // MediaCodec fail when decoding if it's not actually supported. It's assumed
52 // that consumers won't have software fallback for H264 on Android anyway.
53 static const media::VideoCodecProfile kSupportedH264Profiles[] = {
54 media::H264PROFILE_BASELINE,
55 media::H264PROFILE_MAIN,
56 media::H264PROFILE_EXTENDED,
57 media::H264PROFILE_HIGH,
58 media::H264PROFILE_HIGH10PROFILE,
59 media::H264PROFILE_HIGH422PROFILE,
60 media::H264PROFILE_HIGH444PREDICTIVEPROFILE,
61 media::H264PROFILE_SCALABLEBASELINE,
62 media::H264PROFILE_SCALABLEHIGH,
63 media::H264PROFILE_STEREOHIGH,
64 media::H264PROFILE_MULTIVIEWHIGH
65 };
66 #endif
67
68 // Because MediaCodec is thread-hostile (must be poked on a single thread) and
69 // has no callback mechanism (b/11990118), we must drive it by polling for
70 // complete frames (and available input buffers, when the codec is fully
71 // saturated). This function defines the polling delay. The value used is an
72 // arbitrary choice that trades off CPU utilization (spinning) against latency.
73 // Mirrors android_video_encode_accelerator.cc:EncodePollDelay().
74 static inline const base::TimeDelta DecodePollDelay() {
75 // An alternative to this polling scheme could be to dedicate a new thread
76 // (instead of using the ChildThread) to run the MediaCodec, and make that
77 // thread use the timeout-based flavor of MediaCodec's dequeue methods when it
78 // believes the codec should complete "soon" (e.g. waiting for an input
79 // buffer, or waiting for a picture when it knows enough complete input
80 // pictures have been fed to saturate any internal buffering). This is
81 // speculative and it's unclear that this would be a win (nor that there's a
82 // reasonably device-agnostic way to fill in the "believes" above).
83 return base::TimeDelta::FromMilliseconds(10);
84 }
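// The alternative described above is not implemented anywhere in this file.
// Purely as an illustrative sketch (assuming a hypothetical dedicated decode
// thread that owns |media_codec_|), a timeout-based wait would replace the
// NoWaitTimeOut() polling with a blocking dequeue, reusing the same
// DequeueOutputBuffer() signature that DequeueOutput() below already uses:
//
//   int32 buf_index = 0;
//   size_t offset = 0;
//   size_t size = 0;
//   base::TimeDelta timestamp;
//   bool eos = false;
//   media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
//       base::TimeDelta::FromMilliseconds(10),  // Block for up to 10 ms.
//       &buf_index, &offset, &size, &timestamp, &eos, NULL);
//   // A MEDIA_CODEC_OK result would then be handled exactly as in
//   // DequeueOutput(); other statuses would loop or bail out as today.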
85
86 static inline const base::TimeDelta NoWaitTimeOut() {
87 return base::TimeDelta::FromMicroseconds(0);
88 }
89
90 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
91 const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
92 const base::Callback<bool(void)>& make_context_current)
93 : client_(NULL),
94 make_context_current_(make_context_current),
95 codec_(media::kCodecH264),
96 state_(NO_ERROR),
97 surface_texture_id_(0),
98 picturebuffers_requested_(false),
99 gl_decoder_(decoder),
100 weak_this_factory_(this) {}
101
102 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
103 DCHECK(thread_checker_.CalledOnValidThread());
104 }
105
106 bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
107 Client* client) {
108 DCHECK(!media_codec_);
109 DCHECK(thread_checker_.CalledOnValidThread());
110
111 client_ = client;
112 codec_ = VideoCodecProfileToVideoCodec(profile);
113
114 bool profile_supported = codec_ == media::kCodecVP8;
115 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID)
116 profile_supported |=
117 (codec_ == media::kCodecVP9 || codec_ == media::kCodecH264);
118 #endif
119
120 if (!profile_supported) {
121 LOG(ERROR) << "Unsupported profile: " << profile;
122 return false;
123 }
124
125 // Only use MediaCodec for VP8/9 if it's likely backed by hardware.
126 if ((codec_ == media::kCodecVP8 || codec_ == media::kCodecVP9) &&
127 media::VideoCodecBridge::IsKnownUnaccelerated(
128 codec_, media::MEDIA_CODEC_DECODER)) {
129 DVLOG(1) << "Initialization failed: "
130 << (codec_ == media::kCodecVP8 ? "vp8" : "vp9")
131 << " is not hardware accelerated";
132 return false;
133 }
134
135 if (!make_context_current_.Run()) {
136 LOG(ERROR) << "Failed to make this decoder's GL context current.";
137 return false;
138 }
139
140 if (!gl_decoder_) {
141 LOG(ERROR) << "Failed to get gles2 decoder instance.";
142 return false;
143 }
144 glGenTextures(1, &surface_texture_id_);
145 glActiveTexture(GL_TEXTURE0);
146 glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_);
147
148 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
149 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
150 glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
151 GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
152 glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
153 GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
154 gl_decoder_->RestoreTextureUnitBindings(0);
155 gl_decoder_->RestoreActiveTexture();
156
157 surface_texture_ = gfx::SurfaceTexture::Create(surface_texture_id_);
158
159 if (!ConfigureMediaCodec()) {
160 LOG(ERROR) << "Failed to create MediaCodec instance.";
161 return false;
162 }
163
164 return true;
165 }
166
167 void AndroidVideoDecodeAccelerator::DoIOTask() {
168 DCHECK(thread_checker_.CalledOnValidThread());
169 if (state_ == ERROR) {
170 return;
171 }
172
173 QueueInput();
174 DequeueOutput();
175 }
176
177 void AndroidVideoDecodeAccelerator::QueueInput() {
178 DCHECK(thread_checker_.CalledOnValidThread());
179 if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance)
180 return;
181 if (pending_bitstream_buffers_.empty())
182 return;
183
184 int input_buf_index = 0;
185 media::MediaCodecStatus status = media_codec_->DequeueInputBuffer(
186 NoWaitTimeOut(), &input_buf_index);
187 if (status != media::MEDIA_CODEC_OK) {
188 DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
189 status == media::MEDIA_CODEC_ERROR);
190 return;
191 }
192
193 base::Time queued_time = pending_bitstream_buffers_.front().second;
194 UMA_HISTOGRAM_TIMES("Media.AVDA.InputQueueTime",
195 base::Time::Now() - queued_time);
196 media::BitstreamBuffer bitstream_buffer =
197 pending_bitstream_buffers_.front().first;
198 pending_bitstream_buffers_.pop();
199
200 if (bitstream_buffer.id() == -1) {
201 media_codec_->QueueEOS(input_buf_index);
202 return;
203 }
204
205 // Abuse the presentation time argument to propagate the bitstream
206 // buffer ID to the output, so we can report it back to the client in
207 // PictureReady().
208 base::TimeDelta timestamp =
209 base::TimeDelta::FromMicroseconds(bitstream_buffer.id());
210
211 scoped_ptr<base::SharedMemory> shm(
212 new base::SharedMemory(bitstream_buffer.handle(), true));
213
214 RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
215 "Failed to SharedMemory::Map()",
216 UNREADABLE_INPUT);
217
218 status =
219 media_codec_->QueueInputBuffer(input_buf_index,
220 static_cast<const uint8*>(shm->memory()),
221 bitstream_buffer.size(),
222 timestamp);
223 RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
224 "Failed to QueueInputBuffer: " << status,
225 PLATFORM_FAILURE);
226
227 // We should call NotifyEndOfBitstreamBuffer() when no more decoded output
228 // will be returned for this bitstream buffer. However, the MediaCodec API
229 // does not give us enough information to guarantee that.
230 // So, here, we call NotifyEndOfBitstreamBuffer() in advance in order to
231 // keep getting more bitstreams from the client, and throttle them by using
232 // |bitstreams_notified_in_advance_|.
233 // TODO(dwkang): check if there is a way to remove this workaround.
234 base::MessageLoop::current()->PostTask(
235 FROM_HERE,
236 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
237 weak_this_factory_.GetWeakPtr(),
238 bitstream_buffer.id()));
239 bitstreams_notified_in_advance_.push_back(bitstream_buffer.id());
240 }
241
242 void AndroidVideoDecodeAccelerator::DequeueOutput() {
243 DCHECK(thread_checker_.CalledOnValidThread());
244 if (picturebuffers_requested_ && output_picture_buffers_.empty())
245 return;
246
247 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) {
248 // No free picture buffer to send the output to; wait until one is returned.
249 return;
250 }
251
252 bool eos = false;
253 base::TimeDelta timestamp;
254 int32 buf_index = 0;
255 do {
256 size_t offset = 0;
257 size_t size = 0;
258
259 media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
260 NoWaitTimeOut(), &buf_index, &offset, &size, &timestamp, &eos, NULL);
261 switch (status) {
262 case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
263 case media::MEDIA_CODEC_ERROR:
264 return;
265
266 case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: {
267 int32 width, height;
268 media_codec_->GetOutputFormat(&width, &height);
269
270 if (!picturebuffers_requested_) {
271 picturebuffers_requested_ = true;
272 size_ = gfx::Size(width, height);
273 base::MessageLoop::current()->PostTask(
274 FROM_HERE,
275 base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers,
276 weak_this_factory_.GetWeakPtr()));
277 } else {
278 // Dynamic resolution change support is not specified by the Android
279 // platform at and before JB-MR1, so it's not possible to smoothly
280 // continue playback at this point. Instead, error out immediately,
281 // expecting clients to Reset() as appropriate to avoid this.
282 // b/7093648
283 RETURN_ON_FAILURE(size_ == gfx::Size(width, height),
284 "Dynamic resolution change is not supported.",
285 PLATFORM_FAILURE);
286 }
287 return;
288 }
289
290 case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
291 break;
292
293 case media::MEDIA_CODEC_OK:
294 DCHECK_GE(buf_index, 0);
295 break;
296
297 default:
298 NOTREACHED();
299 break;
300 }
301 } while (buf_index < 0);
302
303 // This ignores the emitted ByteBuffer and instead relies on rendering to the
304 // codec's SurfaceTexture and then copying from that texture to the client's
305 // PictureBuffer's texture. This means that each picture's data is written
306 // three times: once to the ByteBuffer, once to the SurfaceTexture, and once
307 // to the client's texture. It would be nicer to either:
308 // 1) Render directly to the client's texture from MediaCodec (one write); or
309 // 2) Upload the ByteBuffer to the client's texture (two writes).
310 // Unfortunately neither is possible:
311 // 1) MediaCodec's use of SurfaceTexture is a singleton, and the texture
312 // written to can't change during the codec's lifetime. b/11990461
313 // 2) The ByteBuffer is likely to contain the pixels in a vendor-specific,
314 // opaque/non-standard format. It's not possible to negotiate the decoder
315 // to emit a specific colorspace, even using HW CSC. b/10706245
316 // So, we live with these two extra copies per picture :(
317 media_codec_->ReleaseOutputBuffer(buf_index, true);
318
319 if (eos) {
320 base::MessageLoop::current()->PostTask(
321 FROM_HERE,
322 base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone,
323 weak_this_factory_.GetWeakPtr()));
324 } else {
325 int64 bitstream_buffer_id = timestamp.InMicroseconds();
326 SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id));
327
328 // Remove ids up to and including |bitstream_buffer_id| from the list. Note
329 // that |bitstreams_notified_in_advance_| does not exactly track the bitstream
330 // ids still in the decoder because of frame reordering; we only maintain it
331 // approximately and use it for throttling.
332 std::list<int32>::iterator it;
333 for (it = bitstreams_notified_in_advance_.begin();
334 it != bitstreams_notified_in_advance_.end();
335 ++it) {
336 if (*it == bitstream_buffer_id) {
337 bitstreams_notified_in_advance_.erase(
338 bitstreams_notified_in_advance_.begin(), ++it);
339 break;
340 }
341 }
342 }
343 }
344
345 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient(
346 int32 bitstream_id) {
347 DCHECK(thread_checker_.CalledOnValidThread());
348 DCHECK_NE(bitstream_id, -1);
349 DCHECK(!free_picture_ids_.empty());
350
351 RETURN_ON_FAILURE(make_context_current_.Run(),
352 "Failed to make this decoder's GL context current.",
353 PLATFORM_FAILURE);
354
355 int32 picture_buffer_id = free_picture_ids_.front();
356 free_picture_ids_.pop();
357
358 float transform_matrix[16];
359 surface_texture_->UpdateTexImage();
360 surface_texture_->GetTransformMatrix(transform_matrix);
361
362 OutputBufferMap::const_iterator i =
363 output_picture_buffers_.find(picture_buffer_id);
364 RETURN_ON_FAILURE(i != output_picture_buffers_.end(),
365 "Can't find a PictureBuffer for " << picture_buffer_id,
366 PLATFORM_FAILURE);
367 uint32 picture_buffer_texture_id = i->second.texture_id();
368
369 RETURN_ON_FAILURE(gl_decoder_.get(),
370 "Failed to get gles2 decoder instance.",
371 ILLEGAL_STATE);
372 // Defer initializing the CopyTextureCHROMIUMResourceManager until it is
373 // needed because it takes tens of milliseconds to initialize.
374 if (!copier_) {
375 copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager());
376 copier_->Initialize(gl_decoder_.get());
377 }
378
379 // Here, we copy |surface_texture_id_| to the picture buffer instead of
380 // attaching a new texture to |surface_texture_| via attachToGLContext(),
381 // because:
382 // 1. Once we call detachFromGLContext(), it deletes the previously attached
383 // texture.
384 // 2. SurfaceTexture requires us to apply a transform matrix when we show
385 // the texture.
386 // TODO(hkuang): get the StreamTexture transform matrix in the GPU process
387 // instead of using the default matrix. crbug.com/226218
388 const static GLfloat default_matrix[16] = {1.0f, 0.0f, 0.0f, 0.0f,
389 0.0f, 1.0f, 0.0f, 0.0f,
390 0.0f, 0.0f, 1.0f, 0.0f,
391 0.0f, 0.0f, 0.0f, 1.0f};
392 copier_->DoCopyTextureWithTransform(
393 gl_decoder_.get(), GL_TEXTURE_EXTERNAL_OES, surface_texture_id_,
394 GL_TEXTURE_2D, picture_buffer_texture_id, GL_RGBA, GL_UNSIGNED_BYTE,
395 size_.width(), size_.height(), false, false, false, nullptr,
396 default_matrix);
397
398 // TODO(henryhsu): Passing (0, 0) as the visible size causes several test
399 // cases to fail. We should determine whether |size_| is the coded or visible size.
400 base::MessageLoop::current()->PostTask(
401 FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyPictureReady,
402 weak_this_factory_.GetWeakPtr(),
403 media::Picture(picture_buffer_id, bitstream_id,
404 gfx::Rect(size_), false)));
405 }
406
407 void AndroidVideoDecodeAccelerator::Decode(
408 const media::BitstreamBuffer& bitstream_buffer) {
409 DCHECK(thread_checker_.CalledOnValidThread());
410 if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) {
411 base::MessageLoop::current()->PostTask(
412 FROM_HERE,
413 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
414 weak_this_factory_.GetWeakPtr(),
415 bitstream_buffer.id()));
416 return;
417 }
418
419 pending_bitstream_buffers_.push(
420 std::make_pair(bitstream_buffer, base::Time::Now()));
421
422 DoIOTask();
423 }
424
425 void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
426 const std::vector<media::PictureBuffer>& buffers) {
427 DCHECK(thread_checker_.CalledOnValidThread());
428 DCHECK(output_picture_buffers_.empty());
429 DCHECK(free_picture_ids_.empty());
430
431 for (size_t i = 0; i < buffers.size(); ++i) {
432 RETURN_ON_FAILURE(buffers[i].size() == size_,
433 "Invalid picture buffer size was passed.",
434 INVALID_ARGUMENT);
435 int32 id = buffers[i].id();
436 output_picture_buffers_.insert(std::make_pair(id, buffers[i]));
437 free_picture_ids_.push(id);
438 // Since the client might be re-using |picture_buffer_id| values, forget
439 // about previously-dismissed IDs now. See ReusePictureBuffer() comment
440 // about "zombies" for why we maintain this set in the first place.
441 dismissed_picture_ids_.erase(id);
442 }
443
444 RETURN_ON_FAILURE(output_picture_buffers_.size() >= kNumPictureBuffers,
445 "Invalid picture buffers were passed.",
446 INVALID_ARGUMENT);
447
448 DoIOTask();
449 }
450
451 void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
452 int32 picture_buffer_id) {
453 DCHECK(thread_checker_.CalledOnValidThread());
454
455 // This ReusePictureBuffer() might have been in a pipe somewhere (queued in
456 // IPC, or in a PostTask either at the sender or receiver) when we sent a
457 // DismissPictureBuffer() for this |picture_buffer_id|. Account for such
458 // potential "zombie" IDs here.
459 if (dismissed_picture_ids_.erase(picture_buffer_id))
460 return;
461
462 free_picture_ids_.push(picture_buffer_id);
463
464 DoIOTask();
465 }
466
467 void AndroidVideoDecodeAccelerator::Flush() {
468 DCHECK(thread_checker_.CalledOnValidThread());
469
470 Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0));
471 }
472
473 bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
474 DCHECK(thread_checker_.CalledOnValidThread());
475 DCHECK(surface_texture_.get());
476
477 gfx::ScopedJavaSurface surface(surface_texture_.get());
478
479 // Pass a dummy 320x240 canvas size and let the codec signal the real size
480 // when it's known from the bitstream.
481 media_codec_.reset(media::VideoCodecBridge::CreateDecoder(
482 codec_, false, gfx::Size(320, 240), surface.j_surface().obj(), NULL));
483 if (!media_codec_)
484 return false;
485
486 io_timer_.Start(FROM_HERE,
487 DecodePollDelay(),
488 this,
489 &AndroidVideoDecodeAccelerator::DoIOTask);
490 return true;
491 }
492
493 void AndroidVideoDecodeAccelerator::Reset() {
494 DCHECK(thread_checker_.CalledOnValidThread());
495
496 while (!pending_bitstream_buffers_.empty()) {
497 int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id();
498 pending_bitstream_buffers_.pop();
499
500 if (bitstream_buffer_id != -1) {
501 base::MessageLoop::current()->PostTask(
502 FROM_HERE,
503 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
504 weak_this_factory_.GetWeakPtr(),
505 bitstream_buffer_id));
506 }
507 }
508 bitstreams_notified_in_advance_.clear();
509
510 for (OutputBufferMap::iterator it = output_picture_buffers_.begin();
511 it != output_picture_buffers_.end();
512 ++it) {
513 client_->DismissPictureBuffer(it->first);
514 dismissed_picture_ids_.insert(it->first);
515 }
516 output_picture_buffers_.clear();
517 std::queue<int32> empty;
518 std::swap(free_picture_ids_, empty);
519 CHECK(free_picture_ids_.empty());
520 picturebuffers_requested_ = false;
521
522 // On some devices, and up to at least JB-MR1,
523 // - flush() can fail after EOS (b/8125974); and
524 // - mid-stream resolution change is unsupported (b/7093648).
525 // To cope with these facts, we always stop & restart the codec on Reset().
526 io_timer_.Stop();
527 media_codec_->Stop();
528 ConfigureMediaCodec();
529 state_ = NO_ERROR;
530
531 base::MessageLoop::current()->PostTask(
532 FROM_HERE,
533 base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone,
534 weak_this_factory_.GetWeakPtr()));
535 }
536
537 void AndroidVideoDecodeAccelerator::Destroy() {
538 DCHECK(thread_checker_.CalledOnValidThread());
539
540 weak_this_factory_.InvalidateWeakPtrs();
541 if (media_codec_) {
542 io_timer_.Stop();
543 media_codec_->Stop();
544 }
545 if (surface_texture_id_)
546 glDeleteTextures(1, &surface_texture_id_);
547 if (copier_)
548 copier_->Destroy();
549 delete this;
550 }
551
552 bool AndroidVideoDecodeAccelerator::CanDecodeOnIOThread() {
553 return false;
554 }
555
556 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {
557 client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D);
558 }
559
560 void AndroidVideoDecodeAccelerator::NotifyPictureReady(
561 const media::Picture& picture) {
562 client_->PictureReady(picture);
563 }
564
565 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer(
566 int input_buffer_id) {
567 client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
568 }
569
570 void AndroidVideoDecodeAccelerator::NotifyFlushDone() {
571 client_->NotifyFlushDone();
572 }
573
574 void AndroidVideoDecodeAccelerator::NotifyResetDone() {
575 client_->NotifyResetDone();
576 }
577
578 void AndroidVideoDecodeAccelerator::NotifyError(
579 media::VideoDecodeAccelerator::Error error) {
580 client_->NotifyError(error);
581 }
582
583 // static
584 media::VideoDecodeAccelerator::SupportedProfiles
585 AndroidVideoDecodeAccelerator::GetSupportedProfiles() {
586 SupportedProfiles profiles;
587
588 if (!media::VideoCodecBridge::IsKnownUnaccelerated(
589 media::kCodecVP8, media::MEDIA_CODEC_DECODER)) {
590 SupportedProfile profile;
591 profile.profile = media::VP8PROFILE_ANY;
592 profile.min_resolution.SetSize(0, 0);
593 profile.max_resolution.SetSize(1920, 1088);
594 profiles.push_back(profile);
595 }
596
597 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID)
598 if (!media::VideoCodecBridge::IsKnownUnaccelerated(
599 media::kCodecVP9, media::MEDIA_CODEC_DECODER)) {
600 SupportedProfile profile;
601 profile.profile = media::VP9PROFILE_ANY;
602 profile.min_resolution.SetSize(0, 0);
603 profile.max_resolution.SetSize(1920, 1088);
604 profiles.push_back(profile);
605 }
606
607 for (const auto& supported_profile : kSupportedH264Profiles) {
608 SupportedProfile profile;
609 profile.profile = supported_profile;
610 profile.min_resolution.SetSize(0, 0);
611 // Advertise support for 4k and let the MediaCodec fail when decoding if it
612 // doesn't support the resolution. It's assumed that consumers won't have
613 // software fallback for H264 on Android anyway.
614 profile.max_resolution.SetSize(3840, 2160);
615 profiles.push_back(profile);
616 }
617 #endif
618
619 return profiles;
620 }
621
622 } // namespace content
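For context on how the class above is meant to be driven, the following is a rough usage sketch inferred only from the methods defined in this file. It is not code from this CL; |gl_decoder|, |make_context_current|, |client|, |bitstream_buffer|, |picture_buffers|, and |picture_buffer_id| are placeholders assumed to be supplied by the caller.

  // Illustrative only; error handling and threading (all calls must be made on
  // the same thread, per the thread_checker_ DCHECKs above) are elided.
  AndroidVideoDecodeAccelerator* vda =
      new AndroidVideoDecodeAccelerator(gl_decoder, make_context_current);
  if (!vda->Initialize(media::VP8PROFILE_ANY, client)) {
    vda->Destroy();  // Destroy() deletes the instance.
    return;
  }
  vda->Decode(bitstream_buffer);    // Repeated once per input buffer.
  // ...the client receives ProvidePictureBuffers() and answers with:
  vda->AssignPictureBuffers(picture_buffers);
  // ...each output texture is recycled once the client is done with it:
  vda->ReusePictureBuffer(picture_buffer_id);
  vda->Flush();                     // Internally queues an EOS (id == -1).
  vda->Destroy();                   // Also deletes |vda|.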