OLD | NEW |
---|---|
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/renderers/renderer_impl.h" | 5 #include "media/renderers/renderer_impl.h" |
6 | 6 |
7 #include <utility> | 7 #include <utility> |
8 | 8 |
9 #include "base/bind.h" | 9 #include "base/bind.h" |
10 #include "base/callback.h" | 10 #include "base/callback.h" |
(...skipping 190 matching lines...)
201 } | 201 } |
202 | 202 |
203 time_source_->SetMediaTime(time); | 203 time_source_->SetMediaTime(time); |
204 | 204 |
205 if (audio_renderer_) | 205 if (audio_renderer_) |
206 audio_renderer_->StartPlaying(); | 206 audio_renderer_->StartPlaying(); |
207 if (video_renderer_) | 207 if (video_renderer_) |
208 video_renderer_->StartPlayingFrom(time); | 208 video_renderer_->StartPlayingFrom(time); |
209 } | 209 } |
210 | 210 |
211 void RendererImpl::RestartStreamPlayback(DemuxerStream* stream, | 211 void RendererImpl::OnStreamStatusChanged(DemuxerStream* stream, |
212 bool enabled, | 212 bool enabled, |
213 base::TimeDelta time) { | 213 base::TimeDelta time) { |
214 DCHECK(task_runner_->BelongsToCurrentThread()); | 214 DCHECK(task_runner_->BelongsToCurrentThread()); |
215 DCHECK(stream); | 215 DCHECK(stream); |
216 bool video = (stream->type() == DemuxerStream::VIDEO); | 216 bool video = (stream->type() == DemuxerStream::VIDEO); |
217 DVLOG(1) << __func__ << (video ? " video" : " audio") << " stream=" << stream | 217 DVLOG(1) << __func__ << (video ? " video" : " audio") << " stream=" << stream |
218 << " enabled=" << stream->enabled() << " time=" << time.InSecondsF(); | 218 << " enabled=" << enabled << " time=" << time.InSecondsF(); |
219 if ((state_ != STATE_PLAYING) || (audio_ended_ && video_ended_)) | 219 if ((state_ != STATE_PLAYING) || (audio_ended_ && video_ended_)) |
220 return; | 220 return; |
221 if (stream->type() == DemuxerStream::VIDEO) { | 221 if (stream->type() == DemuxerStream::VIDEO) { |
222 DCHECK(video_renderer_); | 222 DCHECK(video_renderer_); |
223 if (restarting_video_) | 223 if (restarting_video_) { |
| 224 DVLOG(3) << __func__ << ": postponed stream " << stream |
| 225 << " status change handling."; |
| 226 postponed_video_status_notifications_.push( |
| 227 base::Bind(&RendererImpl::OnStreamStatusChanged, weak_this_, stream, |
| 228 enabled, time)); |
224 return; | 229 return; |
| 230 } |
225 restarting_video_ = true; | 231 restarting_video_ = true; |
226 video_renderer_->Flush( | 232 video_renderer_->Flush( |
227 base::Bind(&RendererImpl::RestartVideoRenderer, weak_this_, time)); | 233 base::Bind(&RendererImpl::RestartVideoRenderer, weak_this_, time)); |
228 } else if (stream->type() == DemuxerStream::AUDIO) { | 234 } else if (stream->type() == DemuxerStream::AUDIO) { |
229 DCHECK(audio_renderer_); | 235 DCHECK(audio_renderer_); |
230 DCHECK(time_source_); | 236 DCHECK(time_source_); |
231 if (restarting_audio_) | 237 if (restarting_audio_) { |
| 238 DVLOG(3) << __func__ << ": postponed stream " << stream |
| 239 << " status change handling."; |
| 240 postponed_audio_status_notifications_.push( |
| 241 base::Bind(&RendererImpl::OnStreamStatusChanged, weak_this_, stream, |
| 242 enabled, time)); |
232 return; | 243 return; |
| 244 } |
233 restarting_audio_ = true; | 245 restarting_audio_ = true; |
234 // Stop ticking (transition into paused state) in audio renderer before | 246 // Stop ticking (transition into paused state) in audio renderer before |
235 // calling Flush, since after Flush we are going to restart playback by | 247 // calling Flush, since after Flush we are going to restart playback by |
236 // calling audio renderer StartPlaying which would fail in playing state. | 248 // calling audio renderer StartPlaying which would fail in playing state. |
237 if (time_ticking_) { | 249 if (time_ticking_) { |
238 time_ticking_ = false; | 250 time_ticking_ = false; |
239 time_source_->StopTicking(); | 251 time_source_->StopTicking(); |
240 } | 252 } |
241 audio_renderer_->Flush( | 253 audio_renderer_->Flush( |
242 base::Bind(&RendererImpl::RestartAudioRenderer, weak_this_, time)); | 254 base::Bind(&RendererImpl::RestartAudioRenderer, weak_this_, time)); |
(...skipping 130 matching lines...)
373 | 385 |
374 DemuxerStream* audio_stream = | 386 DemuxerStream* audio_stream = |
375 demuxer_stream_provider_->GetStream(DemuxerStream::AUDIO); | 387 demuxer_stream_provider_->GetStream(DemuxerStream::AUDIO); |
376 if (!audio_stream) { | 388 if (!audio_stream) { |
377 audio_renderer_.reset(); | 389 audio_renderer_.reset(); |
378 task_runner_->PostTask(FROM_HERE, base::Bind(done_cb, PIPELINE_OK)); | 390 task_runner_->PostTask(FROM_HERE, base::Bind(done_cb, PIPELINE_OK)); |
379 return; | 391 return; |
380 } | 392 } |
381 | 393 |
382 audio_stream->SetStreamStatusChangeCB(base::Bind( | 394 audio_stream->SetStreamStatusChangeCB(base::Bind( |
383 &RendererImpl::RestartStreamPlayback, weak_this_, audio_stream)); | 395 &RendererImpl::OnStreamStatusChanged, weak_this_, audio_stream)); |
384 | 396 |
385 audio_renderer_client_.reset( | 397 audio_renderer_client_.reset( |
386 new RendererClientInternal(DemuxerStream::AUDIO, this)); | 398 new RendererClientInternal(DemuxerStream::AUDIO, this)); |
387 // Note: After the initialization of a renderer, error events from it may | 399 // Note: After the initialization of a renderer, error events from it may |
388 // happen at any time and all future calls must guard against STATE_ERROR. | 400 // happen at any time and all future calls must guard against STATE_ERROR. |
389 audio_renderer_->Initialize(audio_stream, cdm_context_, | 401 audio_renderer_->Initialize(audio_stream, cdm_context_, |
390 audio_renderer_client_.get(), done_cb); | 402 audio_renderer_client_.get(), done_cb); |
391 } | 403 } |
392 | 404 |
393 void RendererImpl::OnAudioRendererInitializeDone(PipelineStatus status) { | 405 void RendererImpl::OnAudioRendererInitializeDone(PipelineStatus status) { |
(...skipping 28 matching lines...)
422 | 434 |
423 DemuxerStream* video_stream = | 435 DemuxerStream* video_stream = |
424 demuxer_stream_provider_->GetStream(DemuxerStream::VIDEO); | 436 demuxer_stream_provider_->GetStream(DemuxerStream::VIDEO); |
425 if (!video_stream) { | 437 if (!video_stream) { |
426 video_renderer_.reset(); | 438 video_renderer_.reset(); |
427 task_runner_->PostTask(FROM_HERE, base::Bind(done_cb, PIPELINE_OK)); | 439 task_runner_->PostTask(FROM_HERE, base::Bind(done_cb, PIPELINE_OK)); |
428 return; | 440 return; |
429 } | 441 } |
430 | 442 |
431 video_stream->SetStreamStatusChangeCB(base::Bind( | 443 video_stream->SetStreamStatusChangeCB(base::Bind( |
432 &RendererImpl::RestartStreamPlayback, weak_this_, video_stream)); | 444 &RendererImpl::OnStreamStatusChanged, weak_this_, video_stream)); |
433 | 445 |
434 video_renderer_client_.reset( | 446 video_renderer_client_.reset( |
435 new RendererClientInternal(DemuxerStream::VIDEO, this)); | 447 new RendererClientInternal(DemuxerStream::VIDEO, this)); |
436 video_renderer_->Initialize( | 448 video_renderer_->Initialize( |
437 video_stream, cdm_context_, video_renderer_client_.get(), | 449 video_stream, cdm_context_, video_renderer_client_.get(), |
438 base::Bind(&RendererImpl::GetWallClockTimes, base::Unretained(this)), | 450 base::Bind(&RendererImpl::GetWallClockTimes, base::Unretained(this)), |
439 done_cb); | 451 done_cb); |
440 } | 452 } |
441 | 453 |
442 void RendererImpl::OnVideoRendererInitializeDone(PipelineStatus status) { | 454 void RendererImpl::OnVideoRendererInitializeDone(PipelineStatus status) { |
(...skipping 114 matching lines...)
557 return "HAVE_ENOUGH"; | 569 return "HAVE_ENOUGH"; |
558 } | 570 } |
559 NOTREACHED(); | 571 NOTREACHED(); |
560 return ""; | 572 return ""; |
561 } | 573 } |
562 } | 574 } |
563 | 575 |
564 bool RendererImpl::HandleRestartedStreamBufferingChanges( | 576 bool RendererImpl::HandleRestartedStreamBufferingChanges( |
565 DemuxerStream::Type type, | 577 DemuxerStream::Type type, |
566 BufferingState new_buffering_state) { | 578 BufferingState new_buffering_state) { |
| 579 DCHECK(task_runner_->BelongsToCurrentThread()); |
567 // When restarting playback we want to defer the BUFFERING_HAVE_NOTHING for | 580 // When restarting playback we want to defer the BUFFERING_HAVE_NOTHING for |
568 // the stream being restarted, to allow continuing uninterrupted playback on | 581 // the stream being restarted, to allow continuing uninterrupted playback on |
569 // the other stream. | 582 // the other stream. |
570 if (type == DemuxerStream::VIDEO && restarting_video_) { | 583 if (type == DemuxerStream::VIDEO && restarting_video_) { |
571 if (new_buffering_state == BUFFERING_HAVE_ENOUGH) { | 584 if (new_buffering_state == BUFFERING_HAVE_ENOUGH) { |
572 DVLOG(1) << __func__ << " Got BUFFERING_HAVE_ENOUGH for video stream," | 585 DVLOG(1) << __func__ << " Got BUFFERING_HAVE_ENOUGH for video stream," |
573 " resuming playback."; | 586 " resuming playback."; |
574 restarting_video_ = false; | 587 restarting_video_ = false; |
| 588 if (!postponed_video_status_notifications_.empty()) { |
| 589 task_runner_->PostTask(FROM_HERE, |
| 590 postponed_video_status_notifications_.front()); |
xhwang 2017/01/04 22:04:16: There's a chance where after this task (say SSC1 …
servolk 2017/01/05 02:38:45: Yes, good catch. We need to set restarting_video_= …
| 591 postponed_video_status_notifications_.pop(); |
xhwang 2017/01/04 22:04:16: Does it make sense to return here and only proceed …
servolk 2017/01/05 02:38:45: I think it's better to do the opposite here, inste…
| 592 } |
575 if (state_ == STATE_PLAYING && | 593 if (state_ == STATE_PLAYING && |
576 !deferred_video_underflow_cb_.IsCancelled()) { | 594 !deferred_video_underflow_cb_.IsCancelled()) { |
577 // If deferred_video_underflow_cb_ wasn't triggered, then audio should | 595 // If deferred_video_underflow_cb_ wasn't triggered, then audio should |
578 // still be playing, we only need to unpause the video stream. | 596 // still be playing, we only need to unpause the video stream. |
579 DVLOG(4) << "deferred_video_underflow_cb_.Cancel()"; | 597 DVLOG(4) << "deferred_video_underflow_cb_.Cancel()"; |
580 deferred_video_underflow_cb_.Cancel(); | 598 deferred_video_underflow_cb_.Cancel(); |
581 video_buffering_state_ = new_buffering_state; | 599 video_buffering_state_ = new_buffering_state; |
582 if (playback_rate_ > 0) | 600 if (playback_rate_ > 0) |
583 video_renderer_->OnTimeProgressing(); | 601 video_renderer_->OnTimeProgressing(); |
584 return true; | 602 return true; |
(...skipping 25 matching lines...)
610 DVLOG(4) << "deferred_audio_restart_underflow_cb_.Cancel()"; | 628 DVLOG(4) << "deferred_audio_restart_underflow_cb_.Cancel()"; |
611 deferred_audio_restart_underflow_cb_.Cancel(); | 629 deferred_audio_restart_underflow_cb_.Cancel(); |
612 } else if (new_buffering_state == BUFFERING_HAVE_ENOUGH) { | 630 } else if (new_buffering_state == BUFFERING_HAVE_ENOUGH) { |
613 DVLOG(1) << __func__ << " Got BUFFERING_HAVE_ENOUGH for audio stream," | 631 DVLOG(1) << __func__ << " Got BUFFERING_HAVE_ENOUGH for audio stream," |
614 " resuming playback."; | 632 " resuming playback."; |
615 deferred_audio_restart_underflow_cb_.Cancel(); | 633 deferred_audio_restart_underflow_cb_.Cancel(); |
616 // Now that we have decoded enough audio, pause playback momentarily to | 634 // Now that we have decoded enough audio, pause playback momentarily to |
617 // ensure video renderer is synchronised with audio. | 635 // ensure video renderer is synchronised with audio. |
618 PausePlayback(); | 636 PausePlayback(); |
619 restarting_audio_ = false; | 637 restarting_audio_ = false; |
| 638 if (!postponed_audio_status_notifications_.empty()) { |
| 639 task_runner_->PostTask(FROM_HERE, |
| 640 postponed_audio_status_notifications_.front()); |
| 641 postponed_audio_status_notifications_.pop(); |
| 642 } |
620 } | 643 } |
621 } | 644 } |
622 return false; | 645 return false; |
623 } | 646 } |
624 | 647 |
625 void RendererImpl::OnBufferingStateChange(DemuxerStream::Type type, | 648 void RendererImpl::OnBufferingStateChange(DemuxerStream::Type type, |
626 BufferingState new_buffering_state) { | 649 BufferingState new_buffering_state) { |
627 DCHECK((type == DemuxerStream::AUDIO) || (type == DemuxerStream::VIDEO)); | 650 DCHECK((type == DemuxerStream::AUDIO) || (type == DemuxerStream::VIDEO)); |
628 BufferingState* buffering_state = type == DemuxerStream::AUDIO | 651 BufferingState* buffering_state = type == DemuxerStream::AUDIO |
629 ? &audio_buffering_state_ | 652 ? &audio_buffering_state_ |
(...skipping 204 matching lines...)
834 DCHECK(task_runner_->BelongsToCurrentThread()); | 857 DCHECK(task_runner_->BelongsToCurrentThread()); |
835 client_->OnVideoNaturalSizeChange(size); | 858 client_->OnVideoNaturalSizeChange(size); |
836 } | 859 } |
837 | 860 |
838 void RendererImpl::OnVideoOpacityChange(bool opaque) { | 861 void RendererImpl::OnVideoOpacityChange(bool opaque) { |
839 DCHECK(task_runner_->BelongsToCurrentThread()); | 862 DCHECK(task_runner_->BelongsToCurrentThread()); |
840 client_->OnVideoOpacityChange(opaque); | 863 client_->OnVideoOpacityChange(opaque); |
841 } | 864 } |
842 | 865 |
843 } // namespace media | 866 } // namespace media |
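The heart of the change is a deferral pattern around `restarting_audio_`/`restarting_video_`: while a renderer is being flushed and restarted, further notifications arriving in `OnStreamStatusChanged` are pushed onto `postponed_audio_status_notifications_`/`postponed_video_status_notifications_`, and one postponed notification is re-posted once the restarted stream reports BUFFERING_HAVE_ENOUGH. Below is a minimal standalone sketch of that pattern, not the Chromium code itself: it assumes `std::function`/`std::queue` and a direct call in place of `base::Bind` and the media task runner, and `StreamRestarter`, `OnStatusChanged()`, and `OnRestartCompleted()` are hypothetical stand-ins for `RendererImpl::OnStreamStatusChanged()` and the BUFFERING_HAVE_ENOUGH branch of `HandleRestartedStreamBufferingChanges()`.

```cpp
// Standalone sketch of the "postpone notifications while restarting" pattern.
#include <functional>
#include <iostream>
#include <queue>

class StreamRestarter {
 public:
  // A stream was enabled/disabled. If a restart is already in flight, queue
  // the notification instead of handling it re-entrantly (mirrors the
  // postponed_*_status_notifications_ queues in the CL).
  void OnStatusChanged(bool enabled, double time_seconds) {
    if (restarting_) {
      postponed_.push([this, enabled, time_seconds] {
        OnStatusChanged(enabled, time_seconds);
      });
      return;
    }
    restarting_ = true;
    std::cout << "flush + restart: enabled=" << enabled
              << " time=" << time_seconds << "s\n";
    // The real code calls the renderer's Flush() here and resumes playback in
    // the flush-done callback.
  }

  // The restarted stream buffered enough to resume. Clear the flag and replay
  // at most one postponed notification.
  void OnRestartCompleted() {
    restarting_ = false;
    if (!postponed_.empty()) {
      std::function<void()> notification = std::move(postponed_.front());
      postponed_.pop();
      notification();
    }
  }

 private:
  bool restarting_ = false;
  std::queue<std::function<void()>> postponed_;
};

int main() {
  StreamRestarter restarter;
  restarter.OnStatusChanged(false, 1.0);  // Kicks off a flush/restart.
  restarter.OnStatusChanged(true, 2.0);   // Arrives mid-restart; gets queued.
  restarter.OnRestartCompleted();         // Replays the queued change.
}
```

In the CL itself the postponed callback is re-posted with `task_runner_->PostTask`, so it runs asynchronously after `HandleRestartedStreamBufferingChanges` returns; the inline review comments above appear to concern the ordering of that re-posted task relative to the `restarting_video_` flag.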