1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/common/gpu/media/android_video_decode_accelerator.h" | |
6 | |
7 #include <stddef.h> | |
8 | |
9 #include "base/android/build_info.h" | |
10 #include "base/auto_reset.h" | |
11 #include "base/bind.h" | |
12 #include "base/bind_helpers.h" | |
13 #include "base/command_line.h" | |
14 #include "base/lazy_instance.h" | |
15 #include "base/logging.h" | |
16 #include "base/message_loop/message_loop.h" | |
17 #include "base/metrics/histogram.h" | |
18 #include "base/task_runner_util.h" | |
19 #include "base/trace_event/trace_event.h" | |
20 #include "content/common/gpu/media/android_copying_backing_strategy.h" | |
21 #include "content/common/gpu/media/android_deferred_rendering_backing_strategy.h
" | |
22 #include "content/common/gpu/media/avda_return_on_failure.h" | |
23 #include "content/common/gpu/media/shared_memory_region.h" | |
24 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" | |
25 #include "gpu/command_buffer/service/mailbox_manager.h" | |
26 #include "gpu/ipc/service/gpu_channel.h" | |
27 #include "media/base/android/media_codec_bridge.h" | |
28 #include "media/base/android/media_codec_util.h" | |
29 #include "media/base/bind_to_current_loop.h" | |
30 #include "media/base/bitstream_buffer.h" | |
31 #include "media/base/limits.h" | |
32 #include "media/base/media.h" | |
33 #include "media/base/timestamp_constants.h" | |
34 #include "media/base/video_decoder_config.h" | |
35 #include "media/video/picture.h" | |
36 #include "ui/gl/android/scoped_java_surface.h" | |
37 #include "ui/gl/android/surface_texture.h" | |
38 #include "ui/gl/gl_bindings.h" | |
39 | |
40 #if defined(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS) | |
41 #include "media/mojo/services/mojo_cdm_service.h" | |
42 #endif | |
43 | |
44 #define POST_ERROR(error_code, error_message) \ | |
45 do { \ | |
46 DLOG(ERROR) << error_message; \ | |
47 PostError(FROM_HERE, media::VideoDecodeAccelerator::error_code); \ | |
48 } while (0) | |
49 | |
50 namespace content { | |
51 | |
52 enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 }; | |
53 | |
54 // Max number of bitstreams notified to the client with | |
55 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream. | |
56 enum { kMaxBitstreamsNotifiedInAdvance = 32 }; | |
57 | |
58 // MediaCodec is only guaranteed to support baseline, but some devices may | |
59 // support others. Advertise support for all H264 profiles and let the | |
60 // MediaCodec fail when decoding if it's not actually supported. It's assumed | |
61 // that consumers won't have software fallback for H264 on Android anyway. | |
62 static const media::VideoCodecProfile kSupportedH264Profiles[] = { | |
63 media::H264PROFILE_BASELINE, | |
64 media::H264PROFILE_MAIN, | |
65 media::H264PROFILE_EXTENDED, | |
66 media::H264PROFILE_HIGH, | |
67 media::H264PROFILE_HIGH10PROFILE, | |
68 media::H264PROFILE_HIGH422PROFILE, | |
69 media::H264PROFILE_HIGH444PREDICTIVEPROFILE, | |
70 media::H264PROFILE_SCALABLEBASELINE, | |
71 media::H264PROFILE_SCALABLEHIGH, | |
72 media::H264PROFILE_STEREOHIGH, | |
73 media::H264PROFILE_MULTIVIEWHIGH | |
74 }; | |
75 | |
76 // Because MediaCodec is thread-hostile (must be poked on a single thread) and | |
77 // has no callback mechanism (b/11990118), we must drive it by polling for | |
78 // complete frames (and available input buffers, when the codec is fully | |
79 // saturated). This function defines the polling delay. The value used is an | |
80 // arbitrary choice that trades off CPU utilization (spinning) against latency. | |
81 // Mirrors android_video_encode_accelerator.cc:EncodePollDelay(). | |
82 static inline const base::TimeDelta DecodePollDelay() { | |
83 // An alternative to this polling scheme could be to dedicate a new thread | |
84 // (instead of using the ChildThread) to run the MediaCodec, and make that | |
85 // thread use the timeout-based flavor of MediaCodec's dequeue methods when it | |
86 // believes the codec should complete "soon" (e.g. waiting for an input | |
87 // buffer, or waiting for a picture when it knows enough complete input | |
88 // pictures have been fed to saturate any internal buffering). This is | |
89 // speculative and it's unclear that this would be a win (nor that there's a | |
90 // reasonably device-agnostic way to fill in the "believes" above). | |
91 return base::TimeDelta::FromMilliseconds(10); | |
92 } | |
93 | |
94 static inline const base::TimeDelta NoWaitTimeOut() { | |
95 return base::TimeDelta::FromMicroseconds(0); | |
96 } | |
97 | |
98 static inline const base::TimeDelta IdleTimerTimeOut() { | |
99 return base::TimeDelta::FromSeconds(1); | |
100 } | |
101 | |
102 // Time between when we notice an error, and when we actually notify somebody. | |
103 // This is to prevent codec errors caused by SurfaceView fullscreen transitions | |
104 // from breaking the pipeline, if we're about to be reset anyway. | |
105 static inline const base::TimeDelta ErrorPostingDelay() { | |
106 return base::TimeDelta::FromSeconds(2); | |
107 } | |
108 | |
109 // For RecordFormatChangedMetric. | |
110 enum FormatChangedValue { | |
111 CodecInitialized = false, | |
112 MissingFormatChanged = true | |
113 }; | |
114 | |
115 static inline void RecordFormatChangedMetric(FormatChangedValue value) { | |
116 UMA_HISTOGRAM_BOOLEAN("Media.AVDA.MissingFormatChanged", !!value); | |
117 } | |
118 | |
119 // Handle OnFrameAvailable callbacks safely. Since they occur asynchronously, | |
120 // we take care that the AVDA that wants them still exists. A WeakPtr to | |
121 // the AVDA would be preferable, except that OnFrameAvailable callbacks can | |
122 // occur off the gpu main thread. We also can't guarantee when the | |
123 // SurfaceTexture will quit sending callbacks to coordinate with the | |
124 // destruction of the AVDA, so we have a separate object that the cb can own. | |
125 class AndroidVideoDecodeAccelerator::OnFrameAvailableHandler | |
126 : public base::RefCountedThreadSafe<OnFrameAvailableHandler> { | |
127 public: | |
128 // We do not retain ownership of |owner|. It must remain valid until | |
129 // after ClearOwner() is called. This will register with | |
130 // |surface_texture| to receive OnFrameAvailable callbacks. | |
131 OnFrameAvailableHandler( | |
132 AndroidVideoDecodeAccelerator* owner, | |
133 const scoped_refptr<gfx::SurfaceTexture>& surface_texture) | |
134 : owner_(owner) { | |
135 // Note that the callback owns a strong ref to us. | |
136 surface_texture->SetFrameAvailableCallbackOnAnyThread( | |
137 base::Bind(&OnFrameAvailableHandler::OnFrameAvailable, | |
138 scoped_refptr<OnFrameAvailableHandler>(this))); | |
139 } | |
140 | |
  141   // Forget about our owner; this must be done before the owner is deleted. | |
142 // No further callbacks will happen once this completes. | |
143 void ClearOwner() { | |
144 base::AutoLock lock(lock_); | |
145 // No callback can happen until we release the lock. | |
146 owner_ = nullptr; | |
147 } | |
148 | |
149 // Call back into our owner if it hasn't been deleted. | |
150 void OnFrameAvailable() { | |
151 base::AutoLock auto_lock(lock_); | |
152 // |owner_| can't be deleted while we have the lock. | |
153 if (owner_) | |
154 owner_->OnFrameAvailable(); | |
155 } | |
156 | |
157 private: | |
158 friend class base::RefCountedThreadSafe<OnFrameAvailableHandler>; | |
159 virtual ~OnFrameAvailableHandler() {} | |
160 | |
161 // Protects changes to owner_. | |
162 base::Lock lock_; | |
163 | |
164 // AVDA that wants the OnFrameAvailable callback. | |
165 AndroidVideoDecodeAccelerator* owner_; | |
166 | |
167 DISALLOW_COPY_AND_ASSIGN(OnFrameAvailableHandler); | |
168 }; | |
169 | |
170 // Helper class to share an IO timer for DoIOTask() execution; prevents each | |
171 // AVDA instance from starting its own high frequency timer. The intuition | |
172 // behind this is that, if we're waiting for long enough, then either (a) | |
173 // MediaCodec is broken or (b) MediaCodec is waiting on us to change state | |
174 // (e.g., get new demuxed data / get a free picture buffer / return an output | |
175 // buffer to MediaCodec). This is inherently a race, since we don't know if | |
176 // MediaCodec is broken or just slow. Since the MediaCodec API doesn't let | |
177 // us wait on MediaCodec state changes prior to L, we more or less have to | |
178 // time out or keep polling forever in some common cases. | |
179 class AVDATimerManager { | |
180 public: | |
181 // Make sure that the construction thread is started for |avda_instance|. | |
182 void StartThread(AndroidVideoDecodeAccelerator* avda_instance) { | |
183 if (thread_avda_instances_.empty()) | |
184 construction_thread_.Start(); | |
185 | |
186 thread_avda_instances_.insert(avda_instance); | |
187 } | |
188 | |
189 // |avda_instance| will no longer need the construction thread. Stop the | |
190 // thread if this is the last instance. | |
191 void StopThread(AndroidVideoDecodeAccelerator* avda_instance) { | |
192 thread_avda_instances_.erase(avda_instance); | |
193 if (thread_avda_instances_.empty()) | |
194 construction_thread_.Stop(); | |
195 } | |
196 | |
197 // Request periodic callback of |avda_instance|->DoIOTask(). Does nothing if | |
198 // the instance is already registered and the timer started. The first request | |
199 // will start the repeating timer on an interval of DecodePollDelay(). | |
200 void StartTimer(AndroidVideoDecodeAccelerator* avda_instance) { | |
201 timer_avda_instances_.insert(avda_instance); | |
202 | |
  203     // If the timer is running, StopTimer() might have been called earlier; if | |
  204     // so, remove the instance from the pending erasures. | |
205 if (timer_running_) | |
206 pending_erase_.erase(avda_instance); | |
207 | |
208 if (io_timer_.IsRunning()) | |
209 return; | |
210 io_timer_.Start(FROM_HERE, DecodePollDelay(), this, | |
211 &AVDATimerManager::RunTimer); | |
212 } | |
213 | |
214 // Stop callbacks to |avda_instance|->DoIOTask(). Does nothing if the instance | |
215 // is not registered. If there are no instances left, the repeating timer will | |
216 // be stopped. | |
217 void StopTimer(AndroidVideoDecodeAccelerator* avda_instance) { | |
218 // If the timer is running, defer erasures to avoid iterator invalidation. | |
219 if (timer_running_) { | |
220 pending_erase_.insert(avda_instance); | |
221 return; | |
222 } | |
223 | |
224 timer_avda_instances_.erase(avda_instance); | |
225 if (timer_avda_instances_.empty()) | |
226 io_timer_.Stop(); | |
227 } | |
228 | |
229 // Eventually, we should run the timer on this thread. For now, we just keep | |
230 // it as a convenience for construction. | |
231 scoped_refptr<base::SingleThreadTaskRunner> ConstructionTaskRunner() { | |
232 return construction_thread_.task_runner(); | |
233 } | |
234 | |
235 private: | |
236 friend struct base::DefaultLazyInstanceTraits<AVDATimerManager>; | |
237 | |
238 AVDATimerManager() : construction_thread_("AVDAThread") {} | |
239 ~AVDATimerManager() { NOTREACHED(); } | |
240 | |
241 void RunTimer() { | |
242 { | |
243 // Call out to all AVDA instances, some of which may attempt to remove | |
244 // themselves from the list during this operation; those removals will be | |
245 // deferred until after all iterations are complete. | |
246 base::AutoReset<bool> scoper(&timer_running_, true); | |
247 for (auto* avda : timer_avda_instances_) | |
248 avda->DoIOTask(false); | |
249 } | |
250 | |
251 // Take care of any deferred erasures. | |
252 for (auto* avda : pending_erase_) | |
253 StopTimer(avda); | |
254 pending_erase_.clear(); | |
255 | |
256 // TODO(dalecurtis): We may want to consider chunking this if task execution | |
257 // takes too long for the combined timer. | |
258 } | |
259 | |
260 // All AVDA instances that would like us to poll DoIOTask. | |
261 std::set<AndroidVideoDecodeAccelerator*> timer_avda_instances_; | |
262 | |
263 // All AVDA instances that might like to use the construction thread. | |
264 std::set<AndroidVideoDecodeAccelerator*> thread_avda_instances_; | |
265 | |
266 // Since we can't delete while iterating when using a set, defer erasure until | |
  267   // after iteration is complete. | |
268 bool timer_running_ = false; | |
269 std::set<AndroidVideoDecodeAccelerator*> pending_erase_; | |
270 | |
271 // Repeating timer responsible for draining pending IO to the codecs. | |
272 base::RepeatingTimer io_timer_; | |
273 | |
274 base::Thread construction_thread_; | |
275 | |
276 DISALLOW_COPY_AND_ASSIGN(AVDATimerManager); | |
277 }; | |
278 | |
279 static base::LazyInstance<AVDATimerManager>::Leaky g_avda_timer = | |
280 LAZY_INSTANCE_INITIALIZER; | |
281 | |
282 AndroidVideoDecodeAccelerator::CodecConfig::CodecConfig() {} | |
283 | |
284 AndroidVideoDecodeAccelerator::CodecConfig::~CodecConfig() {} | |
285 | |
286 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator( | |
287 const MakeGLContextCurrentCallback& make_context_current_cb, | |
288 const GetGLES2DecoderCallback& get_gles2_decoder_cb) | |
289 : client_(NULL), | |
290 make_context_current_cb_(make_context_current_cb), | |
291 get_gles2_decoder_cb_(get_gles2_decoder_cb), | |
292 is_encrypted_(false), | |
293 state_(NO_ERROR), | |
294 picturebuffers_requested_(false), | |
295 media_drm_bridge_cdm_context_(nullptr), | |
296 cdm_registration_id_(0), | |
297 pending_input_buf_index_(-1), | |
298 error_sequence_token_(0), | |
299 defer_errors_(false), | |
300 deferred_initialization_pending_(false), | |
301 weak_this_factory_(this) {} | |
302 | |
303 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() { | |
304 DCHECK(thread_checker_.CalledOnValidThread()); | |
305 g_avda_timer.Pointer()->StopTimer(this); | |
306 g_avda_timer.Pointer()->StopThread(this); | |
307 | |
308 #if defined(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS) | |
309 if (!media_drm_bridge_cdm_context_) | |
310 return; | |
311 | |
312 DCHECK(cdm_registration_id_); | |
313 media_drm_bridge_cdm_context_->UnregisterPlayer(cdm_registration_id_); | |
314 #endif // defined(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS) | |
315 } | |
316 | |
317 bool AndroidVideoDecodeAccelerator::Initialize(const Config& config, | |
318 Client* client) { | |
319 DCHECK(!media_codec_); | |
320 DCHECK(thread_checker_.CalledOnValidThread()); | |
321 TRACE_EVENT0("media", "AVDA::Initialize"); | |
322 | |
323 DVLOG(1) << __FUNCTION__ << ": " << config.AsHumanReadableString(); | |
324 | |
325 if (make_context_current_cb_.is_null() || get_gles2_decoder_cb_.is_null()) { | |
326 NOTREACHED() << "GL callbacks are required for this VDA"; | |
327 return false; | |
328 } | |
329 | |
330 DCHECK(client); | |
331 client_ = client; | |
332 codec_config_ = new CodecConfig(); | |
333 codec_config_->codec_ = VideoCodecProfileToVideoCodec(config.profile); | |
334 codec_config_->initial_expected_coded_size_ = | |
335 config.initial_expected_coded_size; | |
336 is_encrypted_ = config.is_encrypted; | |
337 | |
338 bool profile_supported = codec_config_->codec_ == media::kCodecVP8 || | |
339 codec_config_->codec_ == media::kCodecVP9 || | |
340 codec_config_->codec_ == media::kCodecH264; | |
341 | |
342 // We signalled that we support deferred initialization, so see if the client | |
343 // does also. | |
344 deferred_initialization_pending_ = config.is_deferred_initialization_allowed; | |
345 | |
346 if (!profile_supported) { | |
347 LOG(ERROR) << "Unsupported profile: " << config.profile; | |
348 return false; | |
349 } | |
350 | |
351 // For encrypted streams we postpone configuration until MediaCrypto is | |
352 // available. | |
353 DCHECK(!is_encrypted_ || deferred_initialization_pending_); | |
354 | |
355 // Only use MediaCodec for VP8/9 if it's likely backed by hardware | |
356 // or if the stream is encrypted. | |
357 if ((codec_config_->codec_ == media::kCodecVP8 || | |
358 codec_config_->codec_ == media::kCodecVP9) && | |
359 !is_encrypted_ && | |
360 media::VideoCodecBridge::IsKnownUnaccelerated( | |
361 codec_config_->codec_, media::MEDIA_CODEC_DECODER)) { | |
362 DVLOG(1) << "Initialization failed: " | |
363 << (codec_config_->codec_ == media::kCodecVP8 ? "vp8" : "vp9") | |
364 << " is not hardware accelerated"; | |
365 return false; | |
366 } | |
367 | |
368 auto gles_decoder = get_gles2_decoder_cb_.Run(); | |
369 if (!gles_decoder) { | |
370 LOG(ERROR) << "Failed to get gles2 decoder instance."; | |
371 return false; | |
372 } | |
373 | |
374 const gpu::GpuPreferences& gpu_preferences = | |
375 gles_decoder->GetContextGroup()->gpu_preferences(); | |
376 | |
377 if (UseDeferredRenderingStrategy(gpu_preferences)) { | |
378 // TODO(liberato, watk): Figure out what we want to do about zero copy for | |
379 // fullscreen external SurfaceView in WebView. http://crbug.com/582170. | |
380 DCHECK(!gles_decoder->GetContextGroup()->mailbox_manager()->UsesSync()); | |
381 DVLOG(1) << __FUNCTION__ << ", using deferred rendering strategy."; | |
382 strategy_.reset(new AndroidDeferredRenderingBackingStrategy(this)); | |
383 } else { | |
384 DVLOG(1) << __FUNCTION__ << ", using copy back strategy."; | |
385 strategy_.reset(new AndroidCopyingBackingStrategy(this)); | |
386 } | |
387 | |
388 if (!make_context_current_cb_.Run()) { | |
389 LOG(ERROR) << "Failed to make this decoder's GL context current."; | |
390 return false; | |
391 } | |
392 | |
393 codec_config_->surface_ = strategy_->Initialize(config.surface_id); | |
394 if (codec_config_->surface_.IsEmpty()) { | |
395 LOG(ERROR) << "Failed to initialize the backing strategy. The returned " | |
396 "Java surface is empty."; | |
397 return false; | |
398 } | |
399 | |
400 // TODO(watk,liberato): move this into the strategy. | |
401 scoped_refptr<gfx::SurfaceTexture> surface_texture = | |
402 strategy_->GetSurfaceTexture(); | |
403 if (surface_texture) { | |
404 on_frame_available_handler_ = | |
405 new OnFrameAvailableHandler(this, surface_texture); | |
406 } | |
407 | |
408 // Start the thread for async configuration, even if we don't need it now. | |
409 // ResetCodecState might rebuild the codec later, for example. | |
410 g_avda_timer.Pointer()->StartThread(this); | |
411 | |
412 // If we are encrypted, then we aren't able to create the codec yet. | |
413 if (is_encrypted_) | |
414 return true; | |
415 | |
416 if (deferred_initialization_pending_) { | |
417 ConfigureMediaCodecAsynchronously(); | |
418 return true; | |
419 } | |
420 | |
421 // If the client doesn't support deferred initialization (WebRTC), then we | |
422 // should complete it now and return a meaningful result. | |
423 return ConfigureMediaCodecSynchronously(); | |
424 } | |
425 | |
426 void AndroidVideoDecodeAccelerator::SetCdm(int cdm_id) { | |
427 DVLOG(2) << __FUNCTION__ << ": " << cdm_id; | |
428 | |
429 #if defined(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS) | |
430 DCHECK(client_) << "SetCdm() must be called after Initialize()."; | |
431 | |
432 if (media_drm_bridge_cdm_context_) { | |
433 NOTREACHED() << "We do not support resetting CDM."; | |
434 NotifyInitializationComplete(false); | |
435 return; | |
436 } | |
437 | |
438 // Store the CDM to hold a reference to it. | |
439 cdm_for_reference_holding_only_ = media::MojoCdmService::LegacyGetCdm(cdm_id); | |
440 DCHECK(cdm_for_reference_holding_only_); | |
441 | |
442 // On Android platform the CdmContext must be a MediaDrmBridgeCdmContext. | |
443 media_drm_bridge_cdm_context_ = static_cast<media::MediaDrmBridgeCdmContext*>( | |
444 cdm_for_reference_holding_only_->GetCdmContext()); | |
445 DCHECK(media_drm_bridge_cdm_context_); | |
446 | |
447 // Register CDM callbacks. The callbacks registered will be posted back to | |
448 // this thread via BindToCurrentLoop. | |
449 | |
450 // Since |this| holds a reference to the |cdm_|, by the time the CDM is | |
451 // destructed, UnregisterPlayer() must have been called and |this| has been | |
452 // destructed as well. So the |cdm_unset_cb| will never have a chance to be | |
453 // called. | |
454 // TODO(xhwang): Remove |cdm_unset_cb| after it's not used on all platforms. | |
455 cdm_registration_id_ = media_drm_bridge_cdm_context_->RegisterPlayer( | |
456 media::BindToCurrentLoop( | |
457 base::Bind(&AndroidVideoDecodeAccelerator::OnKeyAdded, | |
458 weak_this_factory_.GetWeakPtr())), | |
459 base::Bind(&base::DoNothing)); | |
460 | |
461 media_drm_bridge_cdm_context_->SetMediaCryptoReadyCB(media::BindToCurrentLoop( | |
462 base::Bind(&AndroidVideoDecodeAccelerator::OnMediaCryptoReady, | |
463 weak_this_factory_.GetWeakPtr()))); | |
464 | |
465 // Postpone NotifyInitializationComplete() call till we create the MediaCodec | |
466 // after OnMediaCryptoReady(). | |
467 #else | |
468 | |
469 NOTIMPLEMENTED(); | |
470 NotifyInitializationComplete(false); | |
471 | |
  472 #endif // defined(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS) | |
473 } | |
474 | |
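// Drives MediaCodec I/O from the GPU main thread: queues any pending input, | |
// drains all available output, then updates the shared polling timer based on | |
// whether any work was done. Does nothing in the ERROR or WAITING_FOR_CODEC | |
// states. | |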
475 void AndroidVideoDecodeAccelerator::DoIOTask(bool start_timer) { | |
476 DCHECK(thread_checker_.CalledOnValidThread()); | |
477 TRACE_EVENT0("media", "AVDA::DoIOTask"); | |
478 if (state_ == ERROR || state_ == WAITING_FOR_CODEC) { | |
479 return; | |
480 } | |
481 | |
482 bool did_work = QueueInput(); | |
483 while (DequeueOutput()) | |
484 did_work = true; | |
485 | |
486 ManageTimer(did_work || start_timer); | |
487 } | |
488 | |
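// Feeds at most one pending bitstream buffer (or an EOS marker) to MediaCodec, | |
// reusing the input buffer left over from a MEDIA_CODEC_NO_KEY failure when | |
// there is one. Returns true if something was queued, false if there is | |
// nothing to do yet or queueing failed. | |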
489 bool AndroidVideoDecodeAccelerator::QueueInput() { | |
490 DCHECK(thread_checker_.CalledOnValidThread()); | |
491 TRACE_EVENT0("media", "AVDA::QueueInput"); | |
492 base::AutoReset<bool> auto_reset(&defer_errors_, true); | |
493 if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance) | |
494 return false; | |
495 if (pending_bitstream_buffers_.empty()) | |
496 return false; | |
497 if (state_ == WAITING_FOR_KEY) | |
498 return false; | |
499 | |
500 int input_buf_index = pending_input_buf_index_; | |
501 | |
502 // Do not dequeue a new input buffer if we failed with MEDIA_CODEC_NO_KEY. | |
503 // That status does not return this buffer back to the pool of | |
504 // available input buffers. We have to reuse it in QueueSecureInputBuffer(). | |
505 if (input_buf_index == -1) { | |
506 media::MediaCodecStatus status = | |
507 media_codec_->DequeueInputBuffer(NoWaitTimeOut(), &input_buf_index); | |
508 switch (status) { | |
509 case media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER: | |
510 return false; | |
511 case media::MEDIA_CODEC_ERROR: | |
512 POST_ERROR(PLATFORM_FAILURE, "Failed to DequeueInputBuffer"); | |
513 return false; | |
514 case media::MEDIA_CODEC_OK: | |
515 break; | |
516 default: | |
517 NOTREACHED() << "Unknown DequeueInputBuffer status " << status; | |
518 return false; | |
519 } | |
520 } | |
521 | |
522 DCHECK_NE(input_buf_index, -1); | |
523 | |
524 media::BitstreamBuffer bitstream_buffer = pending_bitstream_buffers_.front(); | |
525 | |
526 if (bitstream_buffer.id() == -1) { | |
527 pending_bitstream_buffers_.pop(); | |
528 TRACE_COUNTER1("media", "AVDA::PendingBitstreamBufferCount", | |
529 pending_bitstream_buffers_.size()); | |
530 | |
531 DCHECK_NE(state_, ERROR); | |
532 state_ = WAITING_FOR_EOS; | |
533 media_codec_->QueueEOS(input_buf_index); | |
534 return true; | |
535 } | |
536 | |
537 std::unique_ptr<SharedMemoryRegion> shm; | |
538 | |
539 if (pending_input_buf_index_ == -1) { | |
540 // When |pending_input_buf_index_| is not -1, the buffer is already dequeued | |
  541     // from MediaCodec, filled with data, and bitstream_buffer.handle() is | |
542 // closed. | |
543 shm.reset(new SharedMemoryRegion(bitstream_buffer, true)); | |
544 | |
545 if (!shm->Map()) { | |
546 POST_ERROR(UNREADABLE_INPUT, "Failed to SharedMemoryRegion::Map()"); | |
547 return false; | |
548 } | |
549 } | |
550 | |
551 const base::TimeDelta presentation_timestamp = | |
552 bitstream_buffer.presentation_timestamp(); | |
553 DCHECK(presentation_timestamp != media::kNoTimestamp()) | |
554 << "Bitstream buffers must have valid presentation timestamps"; | |
555 | |
556 // There may already be a bitstream buffer with this timestamp, e.g., VP9 alt | |
557 // ref frames, but it's OK to overwrite it because we only expect a single | |
558 // output frame to have that timestamp. AVDA clients only use the bitstream | |
559 // buffer id in the returned Pictures to map a bitstream buffer back to a | |
560 // timestamp on their side, so either one of the bitstream buffer ids will | |
561 // result in them finding the right timestamp. | |
562 bitstream_buffers_in_decoder_[presentation_timestamp] = bitstream_buffer.id(); | |
563 | |
564 // Notice that |memory| will be null if we repeatedly enqueue the same buffer, | |
  565   // which happens after MEDIA_CODEC_NO_KEY. | |
566 const uint8_t* memory = | |
567 shm ? static_cast<const uint8_t*>(shm->memory()) : nullptr; | |
568 const std::string& key_id = bitstream_buffer.key_id(); | |
569 const std::string& iv = bitstream_buffer.iv(); | |
570 const std::vector<media::SubsampleEntry>& subsamples = | |
571 bitstream_buffer.subsamples(); | |
572 | |
573 media::MediaCodecStatus status; | |
574 if (key_id.empty() || iv.empty()) { | |
575 status = media_codec_->QueueInputBuffer(input_buf_index, memory, | |
576 bitstream_buffer.size(), | |
577 presentation_timestamp); | |
578 } else { | |
579 status = media_codec_->QueueSecureInputBuffer( | |
580 input_buf_index, memory, bitstream_buffer.size(), key_id, iv, | |
581 subsamples, presentation_timestamp); | |
582 } | |
583 | |
584 DVLOG(2) << __FUNCTION__ | |
585 << ": Queue(Secure)InputBuffer: pts:" << presentation_timestamp | |
586 << " status:" << status; | |
587 | |
588 if (status == media::MEDIA_CODEC_NO_KEY) { | |
589 // Keep trying to enqueue the same input buffer. | |
590 // The buffer is owned by us (not the MediaCodec) and is filled with data. | |
591 DVLOG(1) << "QueueSecureInputBuffer failed: NO_KEY"; | |
592 pending_input_buf_index_ = input_buf_index; | |
593 state_ = WAITING_FOR_KEY; | |
594 return false; | |
595 } | |
596 | |
597 pending_input_buf_index_ = -1; | |
598 pending_bitstream_buffers_.pop(); | |
599 TRACE_COUNTER1("media", "AVDA::PendingBitstreamBufferCount", | |
600 pending_bitstream_buffers_.size()); | |
  601   // We should call NotifyEndOfBitstreamBuffer() when no more decoded output | |
  602   // will be returned for the bitstream buffer. However, the MediaCodec API | |
  603   // does not let us know when that happens. | |
  604   // So we call NotifyEndOfBitstreamBuffer() in advance here in order to | |
  605   // keep getting more bitstreams from the client, and throttle them by using | |
  606   // |bitstreams_notified_in_advance_|. | |
607 // TODO(dwkang): check if there is a way to remove this workaround. | |
608 base::MessageLoop::current()->PostTask( | |
609 FROM_HERE, | |
610 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
611 weak_this_factory_.GetWeakPtr(), bitstream_buffer.id())); | |
612 bitstreams_notified_in_advance_.push_back(bitstream_buffer.id()); | |
613 | |
614 if (status != media::MEDIA_CODEC_OK) { | |
615 POST_ERROR(PLATFORM_FAILURE, "Failed to QueueInputBuffer: " << status); | |
616 return false; | |
617 } | |
618 | |
619 return true; | |
620 } | |
621 | |
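// Polls MediaCodec for decoded output, handling format changes and EOS along | |
// the way. Maps the output timestamp back to a bitstream buffer id and hands | |
// the frame to the client. Returns true if more output may be immediately | |
// available. | |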
622 bool AndroidVideoDecodeAccelerator::DequeueOutput() { | |
623 DCHECK(thread_checker_.CalledOnValidThread()); | |
624 TRACE_EVENT0("media", "AVDA::DequeueOutput"); | |
625 base::AutoReset<bool> auto_reset(&defer_errors_, true); | |
626 if (picturebuffers_requested_ && output_picture_buffers_.empty()) | |
627 return false; | |
628 | |
629 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) { | |
630 // Don't have any picture buffer to send. Need to wait more. | |
631 return false; | |
632 } | |
633 | |
634 bool eos = false; | |
635 base::TimeDelta presentation_timestamp; | |
636 int32_t buf_index = 0; | |
637 do { | |
638 size_t offset = 0; | |
639 size_t size = 0; | |
640 | |
641 TRACE_EVENT_BEGIN0("media", "AVDA::DequeueOutput"); | |
642 media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer( | |
643 NoWaitTimeOut(), &buf_index, &offset, &size, &presentation_timestamp, | |
644 &eos, NULL); | |
645 TRACE_EVENT_END2("media", "AVDA::DequeueOutput", "status", status, | |
646 "presentation_timestamp (ms)", | |
647 presentation_timestamp.InMilliseconds()); | |
648 | |
649 switch (status) { | |
650 case media::MEDIA_CODEC_ERROR: | |
651 POST_ERROR(PLATFORM_FAILURE, "DequeueOutputBuffer failed."); | |
652 return false; | |
653 | |
654 case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER: | |
655 return false; | |
656 | |
657 case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: { | |
658 if (media_codec_->GetOutputSize(&size_) != media::MEDIA_CODEC_OK) { | |
659 POST_ERROR(PLATFORM_FAILURE, "GetOutputSize failed."); | |
660 return false; | |
661 } | |
662 DVLOG(3) << __FUNCTION__ | |
663 << " OUTPUT_FORMAT_CHANGED, new size: " << size_.ToString(); | |
664 | |
665 // Don't request picture buffers if we already have some. This avoids | |
666 // having to dismiss the existing buffers which may actively reference | |
667 // decoded images. Breaking their connection to the decoded image will | |
668 // cause rendering of black frames. Instead, we let the existing | |
669 // PictureBuffers live on and we simply update their size the next time | |
  670         // they're attached to an image of the new resolution. See the | |
  671         // size update in |SendDecodedFrameToClient| and https://crbug.com/587994. | |
672 if (output_picture_buffers_.empty() && !picturebuffers_requested_) { | |
673 picturebuffers_requested_ = true; | |
674 base::MessageLoop::current()->PostTask( | |
675 FROM_HERE, | |
676 base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers, | |
677 weak_this_factory_.GetWeakPtr())); | |
678 return false; | |
679 } | |
680 | |
681 return true; | |
682 } | |
683 | |
684 case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED: | |
685 break; | |
686 | |
687 case media::MEDIA_CODEC_OK: | |
688 DCHECK_GE(buf_index, 0); | |
689 DVLOG(3) << __FUNCTION__ << ": pts:" << presentation_timestamp | |
690 << " buf_index:" << buf_index << " offset:" << offset | |
691 << " size:" << size << " eos:" << eos; | |
692 break; | |
693 | |
694 default: | |
695 NOTREACHED(); | |
696 break; | |
697 } | |
698 } while (buf_index < 0); | |
699 | |
700 if (eos) { | |
701 DVLOG(3) << __FUNCTION__ << ": Resetting codec state after EOS"; | |
702 | |
703 // If we were waiting for an EOS, clear the state and reset the MediaCodec | |
704 // as normal. Otherwise, enter the ERROR state which will force destruction | |
705 // of MediaCodec during ResetCodecState(). | |
706 // | |
707 // Some Android platforms seem to send an EOS buffer even when we're not | |
708 // expecting it. In this case, destroy and reset the codec but don't notify | |
709 // flush done since it violates the state machine. http://crbug.com/585959. | |
710 const bool was_waiting_for_eos = state_ == WAITING_FOR_EOS; | |
711 state_ = was_waiting_for_eos ? NO_ERROR : ERROR; | |
712 | |
713 ResetCodecState(); | |
714 // |media_codec_| might still be null. | |
715 if (was_waiting_for_eos) { | |
716 base::MessageLoop::current()->PostTask( | |
717 FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone, | |
718 weak_this_factory_.GetWeakPtr())); | |
719 } | |
720 return false; | |
721 } | |
722 | |
723 if (!picturebuffers_requested_) { | |
724 // If, somehow, we get a decoded frame back before a FORMAT_CHANGED | |
725 // message, then we might not have any picture buffers to use. This | |
726 // isn't supposed to happen (see EncodeDecodeTest.java#617). | |
727 // Log a metric to see how common this is. | |
728 RecordFormatChangedMetric(FormatChangedValue::MissingFormatChanged); | |
729 media_codec_->ReleaseOutputBuffer(buf_index, false); | |
730 POST_ERROR(PLATFORM_FAILURE, "Dequeued buffers before FORMAT_CHANGED."); | |
731 return false; | |
732 } | |
733 | |
734 // Get the bitstream buffer id from the timestamp. | |
735 auto it = bitstream_buffers_in_decoder_.find(presentation_timestamp); | |
736 | |
737 if (it != bitstream_buffers_in_decoder_.end()) { | |
738 const int32_t bitstream_buffer_id = it->second; | |
739 bitstream_buffers_in_decoder_.erase(bitstream_buffers_in_decoder_.begin(), | |
740 ++it); | |
741 SendDecodedFrameToClient(buf_index, bitstream_buffer_id); | |
742 | |
  743     // Remove ids up to and including this one. Note that | |
  744     // |bitstreams_notified_in_advance_| does not track the bitstream ids in the | |
  745     // decoder exactly, because of frame reordering. We just maintain it roughly | |
  746     // and use it for throttling. | |
747 for (auto bitstream_it = bitstreams_notified_in_advance_.begin(); | |
748 bitstream_it != bitstreams_notified_in_advance_.end(); | |
749 ++bitstream_it) { | |
750 if (*bitstream_it == bitstream_buffer_id) { | |
751 bitstreams_notified_in_advance_.erase( | |
752 bitstreams_notified_in_advance_.begin(), ++bitstream_it); | |
753 break; | |
754 } | |
755 } | |
756 } else { | |
757 // Normally we assume that the decoder makes at most one output frame for | |
758 // each distinct input timestamp. However MediaCodecBridge uses timestamp | |
759 // correction and provides a non-decreasing timestamp sequence, which might | |
760 // result in timestamp duplicates. Discard the frame if we cannot get the | |
761 // corresponding buffer id. | |
762 DVLOG(3) << __FUNCTION__ << ": Releasing buffer with unexpected PTS: " | |
763 << presentation_timestamp; | |
764 media_codec_->ReleaseOutputBuffer(buf_index, false); | |
765 } | |
766 | |
767 // We got a decoded frame, so try for another. | |
768 return true; | |
769 } | |
770 | |
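// Attaches the codec output buffer at |codec_buffer_index| to a free | |
// PictureBuffer via the backing strategy and posts a PictureReady | |
// notification for |bitstream_id| to the client. | |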
771 void AndroidVideoDecodeAccelerator::SendDecodedFrameToClient( | |
772 int32_t codec_buffer_index, | |
773 int32_t bitstream_id) { | |
774 DCHECK(thread_checker_.CalledOnValidThread()); | |
775 DCHECK_NE(bitstream_id, -1); | |
776 DCHECK(!free_picture_ids_.empty()); | |
777 TRACE_EVENT0("media", "AVDA::SendDecodedFrameToClient"); | |
778 | |
779 if (!make_context_current_cb_.Run()) { | |
780 POST_ERROR(PLATFORM_FAILURE, "Failed to make the GL context current."); | |
781 return; | |
782 } | |
783 | |
784 int32_t picture_buffer_id = free_picture_ids_.front(); | |
785 free_picture_ids_.pop(); | |
786 TRACE_COUNTER1("media", "AVDA::FreePictureIds", free_picture_ids_.size()); | |
787 | |
788 const auto& i = output_picture_buffers_.find(picture_buffer_id); | |
789 if (i == output_picture_buffers_.end()) { | |
790 POST_ERROR(PLATFORM_FAILURE, | |
791 "Can't find PictureBuffer id: " << picture_buffer_id); | |
792 return; | |
793 } | |
794 | |
795 bool size_changed = false; | |
796 if (i->second.size() != size_) { | |
797 // Size may have changed due to resolution change since the last time this | |
798 // PictureBuffer was used. | |
799 strategy_->UpdatePictureBufferSize(&i->second, size_); | |
800 size_changed = true; | |
801 } | |
802 | |
803 // Connect the PictureBuffer to the decoded frame, via whatever | |
804 // mechanism the strategy likes. | |
805 strategy_->UseCodecBufferForPictureBuffer(codec_buffer_index, i->second); | |
806 | |
807 const bool allow_overlay = strategy_->ArePicturesOverlayable(); | |
808 | |
809 media::Picture picture(picture_buffer_id, bitstream_id, gfx::Rect(size_), | |
810 allow_overlay); | |
811 picture.set_size_changed(size_changed); | |
812 | |
813 base::MessageLoop::current()->PostTask( | |
814 FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyPictureReady, | |
815 weak_this_factory_.GetWeakPtr(), picture)); | |
816 } | |
817 | |
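// Entry point for bitstream buffers arriving from the client. Valid buffers | |
// are queued for decoding; a negative id is reported as INVALID_ARGUMENT, and | |
// empty buffers are acknowledged immediately. | |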
818 void AndroidVideoDecodeAccelerator::Decode( | |
819 const media::BitstreamBuffer& bitstream_buffer) { | |
820 DCHECK(thread_checker_.CalledOnValidThread()); | |
821 | |
822 if (bitstream_buffer.id() >= 0 && bitstream_buffer.size() > 0) { | |
823 DecodeBuffer(bitstream_buffer); | |
824 return; | |
825 } | |
826 | |
827 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) | |
828 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); | |
829 | |
830 if (bitstream_buffer.id() < 0) { | |
831 POST_ERROR(INVALID_ARGUMENT, | |
  832                "Invalid bitstream_buffer, id: " << bitstream_buffer.id()); | |
833 } else { | |
834 base::MessageLoop::current()->PostTask( | |
835 FROM_HERE, | |
836 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
837 weak_this_factory_.GetWeakPtr(), bitstream_buffer.id())); | |
838 } | |
839 } | |
840 | |
841 void AndroidVideoDecodeAccelerator::DecodeBuffer( | |
842 const media::BitstreamBuffer& bitstream_buffer) { | |
843 pending_bitstream_buffers_.push(bitstream_buffer); | |
844 TRACE_COUNTER1("media", "AVDA::PendingBitstreamBufferCount", | |
845 pending_bitstream_buffers_.size()); | |
846 | |
847 DoIOTask(true); | |
848 } | |
849 | |
850 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() { | |
851 client_->ProvidePictureBuffers(kNumPictureBuffers, 1, | |
852 strategy_->GetPictureBufferSize(), | |
853 strategy_->GetTextureTarget()); | |
854 } | |
855 | |
856 void AndroidVideoDecodeAccelerator::AssignPictureBuffers( | |
857 const std::vector<media::PictureBuffer>& buffers) { | |
858 DCHECK(thread_checker_.CalledOnValidThread()); | |
859 DCHECK(output_picture_buffers_.empty()); | |
860 DCHECK(free_picture_ids_.empty()); | |
861 | |
862 if (buffers.size() < kNumPictureBuffers) { | |
863 POST_ERROR(INVALID_ARGUMENT, "Not enough picture buffers assigned."); | |
864 return; | |
865 } | |
866 | |
867 const bool have_context = make_context_current_cb_.Run(); | |
868 LOG_IF(WARNING, !have_context) | |
869 << "Failed to make GL context current for Assign, continuing."; | |
870 | |
871 for (size_t i = 0; i < buffers.size(); ++i) { | |
872 if (buffers[i].size() != strategy_->GetPictureBufferSize()) { | |
873 POST_ERROR(INVALID_ARGUMENT, | |
874 "Invalid picture buffer size assigned. Wanted " | |
875 << size_.ToString() << ", but got " | |
876 << buffers[i].size().ToString()); | |
877 return; | |
878 } | |
879 int32_t id = buffers[i].id(); | |
880 output_picture_buffers_.insert(std::make_pair(id, buffers[i])); | |
881 free_picture_ids_.push(id); | |
882 | |
883 strategy_->AssignOnePictureBuffer(buffers[i], have_context); | |
884 } | |
885 TRACE_COUNTER1("media", "AVDA::FreePictureIds", free_picture_ids_.size()); | |
886 DoIOTask(true); | |
887 } | |
888 | |
889 void AndroidVideoDecodeAccelerator::ReusePictureBuffer( | |
890 int32_t picture_buffer_id) { | |
891 DCHECK(thread_checker_.CalledOnValidThread()); | |
892 | |
893 free_picture_ids_.push(picture_buffer_id); | |
894 TRACE_COUNTER1("media", "AVDA::FreePictureIds", free_picture_ids_.size()); | |
895 | |
896 OutputBufferMap::const_iterator i = | |
897 output_picture_buffers_.find(picture_buffer_id); | |
898 if (i == output_picture_buffers_.end()) { | |
899 POST_ERROR(PLATFORM_FAILURE, "Can't find PictureBuffer id " | |
900 << picture_buffer_id); | |
901 return; | |
902 } | |
903 | |
904 strategy_->ReuseOnePictureBuffer(i->second); | |
905 DoIOTask(true); | |
906 } | |
907 | |
908 void AndroidVideoDecodeAccelerator::Flush() { | |
909 DCHECK(thread_checker_.CalledOnValidThread()); | |
910 | |
911 DecodeBuffer(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); | |
912 } | |
913 | |
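// Starts (re)creating the MediaCodec on the construction thread. Any existing | |
// codec is released first, and OnCodecConfigured() runs on this thread with | |
// the result. | |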
914 void AndroidVideoDecodeAccelerator::ConfigureMediaCodecAsynchronously() { | |
915 DCHECK(thread_checker_.CalledOnValidThread()); | |
916 | |
917 // It's probably okay just to return here, since the codec will be configured | |
918 // asynchronously. It's unclear that any state for the new request could | |
919 // be different, unless somebody modifies |codec_config_| while we're already | |
920 // waiting for a codec. One shouldn't do that for thread safety. | |
921 DCHECK_NE(state_, WAITING_FOR_CODEC); | |
922 | |
923 state_ = WAITING_FOR_CODEC; | |
924 | |
925 // Tell the strategy that we're changing codecs. The codec itself could be | |
926 // used normally, since we don't replace it until we're back on the main | |
927 // thread. However, if we're using an output surface, then the incoming codec | |
928 // might access that surface while the main thread is drawing. Telling the | |
929 // strategy to forget the codec avoids this. | |
930 if (media_codec_) { | |
931 media_codec_.reset(); | |
932 strategy_->CodecChanged(nullptr, output_picture_buffers_); | |
933 } | |
934 | |
935 base::PostTaskAndReplyWithResult( | |
936 g_avda_timer.Pointer()->ConstructionTaskRunner().get(), FROM_HERE, | |
937 base::Bind(&AndroidVideoDecodeAccelerator::ConfigureMediaCodecOnAnyThread, | |
938 codec_config_), | |
939 base::Bind(&AndroidVideoDecodeAccelerator::OnCodecConfigured, | |
940 weak_this_factory_.GetWeakPtr())); | |
941 } | |
942 | |
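// Creates the MediaCodec on the current thread and reports the result through | |
// OnCodecConfigured() before returning. Returns true on success. | |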
943 bool AndroidVideoDecodeAccelerator::ConfigureMediaCodecSynchronously() { | |
944 state_ = WAITING_FOR_CODEC; | |
945 std::unique_ptr<media::VideoCodecBridge> media_codec = | |
946 ConfigureMediaCodecOnAnyThread(codec_config_); | |
947 OnCodecConfigured(std::move(media_codec)); | |
948 return !!media_codec_; | |
949 } | |
950 | |
951 std::unique_ptr<media::VideoCodecBridge> | |
952 AndroidVideoDecodeAccelerator::ConfigureMediaCodecOnAnyThread( | |
953 scoped_refptr<CodecConfig> codec_config) { | |
954 TRACE_EVENT0("media", "AVDA::ConfigureMediaCodec"); | |
955 | |
956 jobject media_crypto = codec_config->media_crypto_ | |
957 ? codec_config->media_crypto_->obj() | |
958 : nullptr; | |
959 | |
960 // |needs_protected_surface_| implies encrypted stream. | |
961 DCHECK(!codec_config->needs_protected_surface_ || media_crypto); | |
962 | |
963 return std::unique_ptr<media::VideoCodecBridge>( | |
964 media::VideoCodecBridge::CreateDecoder( | |
965 codec_config->codec_, codec_config->needs_protected_surface_, | |
966 codec_config->initial_expected_coded_size_, | |
967 codec_config->surface_.j_surface().obj(), media_crypto, true)); | |
968 } | |
969 | |
970 void AndroidVideoDecodeAccelerator::OnCodecConfigured( | |
971 std::unique_ptr<media::VideoCodecBridge> media_codec) { | |
972 DCHECK(thread_checker_.CalledOnValidThread()); | |
973 DCHECK_EQ(state_, WAITING_FOR_CODEC); | |
974 | |
975 media_codec_ = std::move(media_codec); | |
976 | |
977 // Record one instance of the codec being initialized. | |
978 RecordFormatChangedMetric(FormatChangedValue::CodecInitialized); | |
979 | |
980 strategy_->CodecChanged(media_codec_.get(), output_picture_buffers_); | |
981 | |
982 // If we are supposed to notify that initialization is complete, then do so | |
983 // now. Otherwise, this is a reconfiguration. | |
984 if (deferred_initialization_pending_) { | |
985 NotifyInitializationComplete(!!media_codec_); | |
986 deferred_initialization_pending_ = false; | |
987 } | |
988 | |
989 if (!media_codec_) { | |
990 POST_ERROR(PLATFORM_FAILURE, "Failed to create MediaCodec."); | |
991 return; | |
992 } | |
993 | |
994 state_ = NO_ERROR; | |
995 | |
996 ManageTimer(true); | |
997 } | |
998 | |
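// Drops all queued decode state, then either flushes the existing MediaCodec | |
// (fast path, JB-MR2 and later when not in an error state) or tears it down | |
// and starts an asynchronous reconfiguration. | |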
999 void AndroidVideoDecodeAccelerator::ResetCodecState() { | |
1000 DCHECK(thread_checker_.CalledOnValidThread()); | |
1001 | |
1002 // If there is already a reset in flight, then that counts. This can really | |
1003 // only happen if somebody calls Reset. | |
1004 if (state_ == WAITING_FOR_CODEC) | |
1005 return; | |
1006 | |
1007 bitstream_buffers_in_decoder_.clear(); | |
1008 | |
1009 if (pending_input_buf_index_ != -1) { | |
  1010     // The data for that index exists in the input buffer, but the corresponding | |
  1011     // shm block has been deleted. Check that it is safe to flush the codec, i.e. | |
  1012     // |pending_bitstream_buffers_| is empty. | |
1013 // TODO(timav): keep shm block for that buffer and remove this restriction. | |
1014 DCHECK(pending_bitstream_buffers_.empty()); | |
1015 pending_input_buf_index_ = -1; | |
1016 } | |
1017 | |
1018 if (state_ == WAITING_FOR_KEY) | |
1019 state_ = NO_ERROR; | |
1020 | |
1021 // We might increment error_sequence_token here to cancel any delayed errors, | |
1022 // but right now it's unclear that it's safe to do so. If we are in an error | |
1023 // state because of a codec error, then it would be okay. Otherwise, it's | |
1024 // less obvious that we are exiting the error state. Since deferred errors | |
1025 // are only intended for fullscreen transitions right now, we take the more | |
1026 // conservative approach and let the errors post. | |
1027 // TODO(liberato): revisit this once we sort out the error state a bit more. | |
1028 | |
  1029   // When the codec is not in an error state we can quickly reset (internally calls | |
1030 // flush()) for JB-MR2 and beyond. Prior to JB-MR2, flush() had several bugs | |
1031 // (b/8125974, b/8347958) so we must delete the MediaCodec and create a new | |
1032 // one. The full reconfigure is much slower and may cause visible freezing if | |
1033 // done mid-stream. | |
1034 if (state_ == NO_ERROR && | |
1035 base::android::BuildInfo::GetInstance()->sdk_int() >= 18) { | |
1036 DVLOG(3) << __FUNCTION__ << " Doing fast MediaCodec reset (flush)."; | |
1037 media_codec_->Reset(); | |
1038 // Since we just flushed all the output buffers, make sure that nothing is | |
1039 // using them. | |
1040 strategy_->CodecChanged(media_codec_.get(), output_picture_buffers_); | |
1041 } else { | |
1042 DVLOG(3) << __FUNCTION__ | |
1043 << " Deleting the MediaCodec and creating a new one."; | |
1044 g_avda_timer.Pointer()->StopTimer(this); | |
1045 // Changing the codec will also notify the strategy to forget about any | |
1046 // output buffers it has currently. | |
1047 state_ = NO_ERROR; | |
1048 ConfigureMediaCodecAsynchronously(); | |
1049 } | |
1050 } | |
1051 | |
1052 void AndroidVideoDecodeAccelerator::Reset() { | |
1053 DCHECK(thread_checker_.CalledOnValidThread()); | |
1054 TRACE_EVENT0("media", "AVDA::Reset"); | |
1055 | |
1056 while (!pending_bitstream_buffers_.empty()) { | |
1057 int32_t bitstream_buffer_id = pending_bitstream_buffers_.front().id(); | |
1058 pending_bitstream_buffers_.pop(); | |
1059 | |
1060 if (bitstream_buffer_id != -1) { | |
1061 base::MessageLoop::current()->PostTask( | |
1062 FROM_HERE, | |
1063 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
1064 weak_this_factory_.GetWeakPtr(), bitstream_buffer_id)); | |
1065 } | |
1066 } | |
1067 TRACE_COUNTER1("media", "AVDA::PendingBitstreamBufferCount", 0); | |
1068 bitstreams_notified_in_advance_.clear(); | |
1069 | |
1070 // Any error that is waiting to post can be ignored. | |
1071 error_sequence_token_++; | |
1072 | |
1073 ResetCodecState(); | |
1074 | |
1075 // Note that |media_codec_| might not yet be ready, but we can still post | |
1076 // this anyway. | |
1077 base::MessageLoop::current()->PostTask( | |
1078 FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone, | |
1079 weak_this_factory_.GetWeakPtr())); | |
1080 } | |
1081 | |
1082 void AndroidVideoDecodeAccelerator::Destroy() { | |
1083 DCHECK(thread_checker_.CalledOnValidThread()); | |
1084 | |
1085 bool have_context = make_context_current_cb_.Run(); | |
1086 if (!have_context) | |
  1087     LOG(WARNING) << "Failed to make GL context current for Destroy, continuing."; | |
1088 | |
1089 if (strategy_) | |
1090 strategy_->Cleanup(have_context, output_picture_buffers_); | |
1091 | |
1092 // If we have an OnFrameAvailable handler, tell it that we're going away. | |
1093 if (on_frame_available_handler_) { | |
1094 on_frame_available_handler_->ClearOwner(); | |
1095 on_frame_available_handler_ = nullptr; | |
1096 } | |
1097 | |
1098 // Note that async codec construction might still be in progress. In that | |
1099 // case, the codec will be deleted when it completes once we invalidate all | |
1100 // our weak refs. | |
1101 weak_this_factory_.InvalidateWeakPtrs(); | |
1102 if (media_codec_) { | |
1103 g_avda_timer.Pointer()->StopTimer(this); | |
1104 media_codec_.reset(); | |
1105 } | |
1106 delete this; | |
1107 } | |
1108 | |
1109 bool AndroidVideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread( | |
1110 const base::WeakPtr<Client>& decode_client, | |
1111 const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) { | |
1112 return false; | |
1113 } | |
1114 | |
1115 const gfx::Size& AndroidVideoDecodeAccelerator::GetSize() const { | |
1116 return size_; | |
1117 } | |
1118 | |
1119 const base::ThreadChecker& AndroidVideoDecodeAccelerator::ThreadChecker() | |
1120 const { | |
1121 return thread_checker_; | |
1122 } | |
1123 | |
1124 base::WeakPtr<gpu::gles2::GLES2Decoder> | |
1125 AndroidVideoDecodeAccelerator::GetGlDecoder() const { | |
1126 return get_gles2_decoder_cb_.Run(); | |
1127 } | |
1128 | |
1129 gpu::gles2::TextureRef* AndroidVideoDecodeAccelerator::GetTextureForPicture( | |
1130 const media::PictureBuffer& picture_buffer) { | |
1131 auto gles_decoder = GetGlDecoder(); | |
1132 RETURN_ON_FAILURE(this, gles_decoder, "Failed to get GL decoder", | |
1133 ILLEGAL_STATE, nullptr); | |
1134 RETURN_ON_FAILURE(this, gles_decoder->GetContextGroup(), | |
1135 "Null gles_decoder->GetContextGroup()", ILLEGAL_STATE, | |
1136 nullptr); | |
1137 gpu::gles2::TextureManager* texture_manager = | |
1138 gles_decoder->GetContextGroup()->texture_manager(); | |
1139 RETURN_ON_FAILURE(this, texture_manager, "Null texture_manager", | |
1140 ILLEGAL_STATE, nullptr); | |
1141 | |
1142 DCHECK_LE(1u, picture_buffer.internal_texture_ids().size()); | |
1143 gpu::gles2::TextureRef* texture_ref = | |
1144 texture_manager->GetTexture(picture_buffer.internal_texture_ids()[0]); | |
  1145   RETURN_ON_FAILURE(this, texture_ref, "Null texture_ref", ILLEGAL_STATE, | |
1146 nullptr); | |
1147 | |
1148 return texture_ref; | |
1149 } | |
1150 | |
1151 void AndroidVideoDecodeAccelerator::OnFrameAvailable() { | |
1152 // Remember: this may be on any thread. | |
1153 DCHECK(strategy_); | |
1154 strategy_->OnFrameAvailable(); | |
1155 } | |
1156 | |
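// Posts NotifyError() for |error| to the current message loop, delayed by | |
// ErrorPostingDelay() while errors are deferred, and enters the ERROR state. | |
// The posted task is dropped if |error_sequence_token_| changes before it | |
// runs. | |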
1157 void AndroidVideoDecodeAccelerator::PostError( | |
1158 const ::tracked_objects::Location& from_here, | |
1159 media::VideoDecodeAccelerator::Error error) { | |
1160 base::MessageLoop::current()->PostDelayedTask( | |
1161 from_here, | |
1162 base::Bind(&AndroidVideoDecodeAccelerator::NotifyError, | |
1163 weak_this_factory_.GetWeakPtr(), error, error_sequence_token_), | |
1164 (defer_errors_ ? ErrorPostingDelay() : base::TimeDelta())); | |
1165 state_ = ERROR; | |
1166 } | |
1167 | |
1168 void AndroidVideoDecodeAccelerator::OnMediaCryptoReady( | |
1169 media::MediaDrmBridgeCdmContext::JavaObjectPtr media_crypto, | |
1170 bool needs_protected_surface) { | |
1171 DVLOG(1) << __FUNCTION__; | |
1172 | |
1173 if (!media_crypto) { | |
1174 LOG(ERROR) << "MediaCrypto is not available, can't play encrypted stream."; | |
1175 cdm_for_reference_holding_only_ = nullptr; | |
1176 media_drm_bridge_cdm_context_ = nullptr; | |
1177 NotifyInitializationComplete(false); | |
1178 return; | |
1179 } | |
1180 | |
1181 DCHECK(!media_crypto->is_null()); | |
1182 | |
1183 // We assume this is a part of the initialization process, thus MediaCodec | |
1184 // is not created yet. | |
1185 DCHECK(!media_codec_); | |
1186 | |
1187 codec_config_->media_crypto_ = std::move(media_crypto); | |
1188 codec_config_->needs_protected_surface_ = needs_protected_surface; | |
1189 | |
1190 // After receiving |media_crypto_| we can configure MediaCodec. | |
1191 ConfigureMediaCodecAsynchronously(); | |
1192 } | |
1193 | |
1194 void AndroidVideoDecodeAccelerator::OnKeyAdded() { | |
1195 DVLOG(1) << __FUNCTION__; | |
1196 | |
1197 if (state_ == WAITING_FOR_KEY) | |
1198 state_ = NO_ERROR; | |
1199 | |
1200 DoIOTask(true); | |
1201 } | |
1202 | |
1203 void AndroidVideoDecodeAccelerator::NotifyInitializationComplete(bool success) { | |
1204 client_->NotifyInitializationComplete(success); | |
1205 } | |
1206 | |
1207 void AndroidVideoDecodeAccelerator::NotifyPictureReady( | |
1208 const media::Picture& picture) { | |
1209 client_->PictureReady(picture); | |
1210 } | |
1211 | |
1212 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer( | |
1213 int input_buffer_id) { | |
1214 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | |
1215 } | |
1216 | |
1217 void AndroidVideoDecodeAccelerator::NotifyFlushDone() { | |
1218 client_->NotifyFlushDone(); | |
1219 } | |
1220 | |
1221 void AndroidVideoDecodeAccelerator::NotifyResetDone() { | |
1222 client_->NotifyResetDone(); | |
1223 } | |
1224 | |
1225 void AndroidVideoDecodeAccelerator::NotifyError( | |
1226 media::VideoDecodeAccelerator::Error error, | |
1227 int token) { | |
1228 DVLOG(1) << __FUNCTION__ << ": error: " << error << " token: " << token | |
1229 << " current: " << error_sequence_token_; | |
1230 if (token != error_sequence_token_) | |
1231 return; | |
1232 | |
1233 client_->NotifyError(error); | |
1234 } | |
1235 | |
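// Keeps the shared DoIOTask() timer running while work is being done; stops | |
// it once we have been idle for longer than IdleTimerTimeOut(). | |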
1236 void AndroidVideoDecodeAccelerator::ManageTimer(bool did_work) { | |
1237 bool should_be_running = true; | |
1238 | |
1239 base::TimeTicks now = base::TimeTicks::Now(); | |
1240 if (!did_work && !most_recent_work_.is_null()) { | |
1241 // Make sure that we have done work recently enough, else stop the timer. | |
1242 if (now - most_recent_work_ > IdleTimerTimeOut()) { | |
1243 most_recent_work_ = base::TimeTicks(); | |
1244 should_be_running = false; | |
1245 } | |
1246 } else { | |
1247 most_recent_work_ = now; | |
1248 } | |
1249 | |
1250 if (should_be_running) | |
1251 g_avda_timer.Pointer()->StartTimer(this); | |
1252 else | |
1253 g_avda_timer.Pointer()->StopTimer(this); | |
1254 } | |
1255 | |
1256 // static | |
1257 bool AndroidVideoDecodeAccelerator::UseDeferredRenderingStrategy( | |
1258 const gpu::GpuPreferences& gpu_preferences) { | |
1259 // TODO(liberato, watk): Figure out what we want to do about zero copy for | |
1260 // fullscreen external SurfaceView in WebView. http://crbug.com/582170. | |
1261 return !gpu_preferences.enable_threaded_texture_mailboxes; | |
1262 } | |
1263 | |
1264 // static | |
1265 media::VideoDecodeAccelerator::Capabilities | |
1266 AndroidVideoDecodeAccelerator::GetCapabilities( | |
1267 const gpu::GpuPreferences& gpu_preferences) { | |
1268 Capabilities capabilities; | |
1269 SupportedProfiles& profiles = capabilities.supported_profiles; | |
1270 | |
1271 SupportedProfile profile; | |
1272 | |
1273 if (media::MediaCodecUtil::IsVp8DecoderAvailable()) { | |
1274 profile.profile = media::VP8PROFILE_ANY; | |
1275 profile.min_resolution.SetSize(0, 0); | |
1276 profile.max_resolution.SetSize(1920, 1088); | |
1277 // If we know MediaCodec will just create a software codec, prefer our | |
1278 // internal software decoder instead. It's more up to date and secured | |
  1279     // within the renderer sandbox. However, if the content is encrypted, we | |
  1280     // must use MediaCodec anyway, since MediaDrm offers no way to decrypt | |
1281 // the buffers and let us use our internal software decoders. | |
1282 profile.encrypted_only = media::VideoCodecBridge::IsKnownUnaccelerated( | |
1283 media::kCodecVP8, media::MEDIA_CODEC_DECODER); | |
1284 profiles.push_back(profile); | |
1285 } | |
1286 | |
1287 if (media::MediaCodecUtil::IsVp9DecoderAvailable()) { | |
1288 profile.min_resolution.SetSize(0, 0); | |
1289 profile.max_resolution.SetSize(1920, 1088); | |
1290 // If we know MediaCodec will just create a software codec, prefer our | |
1291 // internal software decoder instead. It's more up to date and secured | |
  1292     // within the renderer sandbox. However, if the content is encrypted, we | |
  1293     // must use MediaCodec anyway, since MediaDrm offers no way to decrypt | |
1294 // the buffers and let us use our internal software decoders. | |
1295 profile.encrypted_only = media::VideoCodecBridge::IsKnownUnaccelerated( | |
1296 media::kCodecVP9, media::MEDIA_CODEC_DECODER); | |
1297 profile.profile = media::VP9PROFILE_PROFILE0; | |
1298 profiles.push_back(profile); | |
1299 profile.profile = media::VP9PROFILE_PROFILE1; | |
1300 profiles.push_back(profile); | |
1301 profile.profile = media::VP9PROFILE_PROFILE2; | |
1302 profiles.push_back(profile); | |
1303 profile.profile = media::VP9PROFILE_PROFILE3; | |
1304 profiles.push_back(profile); | |
1305 } | |
1306 | |
1307 for (const auto& supported_profile : kSupportedH264Profiles) { | |
1308 SupportedProfile profile; | |
1309 profile.profile = supported_profile; | |
1310 profile.min_resolution.SetSize(0, 0); | |
1311 // Advertise support for 4k and let the MediaCodec fail when decoding if it | |
1312 // doesn't support the resolution. It's assumed that consumers won't have | |
1313 // software fallback for H264 on Android anyway. | |
1314 profile.max_resolution.SetSize(3840, 2160); | |
1315 profiles.push_back(profile); | |
1316 } | |
1317 | |
1318 if (UseDeferredRenderingStrategy(gpu_preferences)) { | |
1319 capabilities.flags = media::VideoDecodeAccelerator::Capabilities:: | |
1320 NEEDS_ALL_PICTURE_BUFFERS_TO_DECODE | | |
1321 media::VideoDecodeAccelerator::Capabilities:: | |
1322 SUPPORTS_DEFERRED_INITIALIZATION; | |
1323 if (media::MediaCodecUtil::IsSurfaceViewOutputSupported()) { | |
1324 capabilities.flags |= media::VideoDecodeAccelerator::Capabilities:: | |
1325 SUPPORTS_EXTERNAL_OUTPUT_SURFACE; | |
1326 } | |
1327 } | |
1328 | |
1329 return capabilities; | |
1330 } | |
1331 | |
1332 } // namespace content | |