Index: media/base/pipeline.cc
diff --git a/media/base/pipeline.cc b/media/base/pipeline.cc
index 705a3eb3151a9c8dc2eaa0f258b1fee398c086ca..529e1210a2dc1ca807680285241d23fbb5c51539 100644
--- a/media/base/pipeline.cc
+++ b/media/base/pipeline.cc
@@ -17,17 +17,12 @@
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "base/synchronization/condition_variable.h"
-#include "media/base/audio_decoder.h"
-#include "media/base/audio_renderer.h"
#include "media/base/filter_collection.h"
#include "media/base/media_log.h"
#include "media/base/text_renderer.h"
#include "media/base/text_track_config.h"
-#include "media/base/time_delta_interpolator.h"
-#include "media/base/time_source.h"
-#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
-#include "media/base/video_renderer.h"
+#include "media/filters/renderer_impl.h"
using base::TimeDelta;
@@ -42,22 +37,17 @@ Pipeline::Pipeline(
did_loading_progress_(false),
volume_(1.0f),
playback_rate_(0.0f),
- interpolator_(new TimeDeltaInterpolator(&default_tick_clock_)),
- interpolation_state_(INTERPOLATION_STOPPED),
status_(PIPELINE_OK),
+ is_initialized_(false),
state_(kCreated),
- audio_ended_(false),
- video_ended_(false),
+ ended_(false),
text_ended_(false),
- audio_buffering_state_(BUFFERING_HAVE_NOTHING),
- video_buffering_state_(BUFFERING_HAVE_NOTHING),
demuxer_(NULL),
- time_source_(NULL),
- underflow_disabled_for_testing_(false) {
+ underflow_disabled_for_testing_(false),
+ test_interpolator_(NULL) {
media_log_->AddEvent(media_log_->CreatePipelineStateChangedEvent(kCreated));
media_log_->AddEvent(
media_log_->CreateEvent(MediaLogEvent::PIPELINE_CREATED));
- interpolator_->SetBounds(base::TimeDelta(), base::TimeDelta());
}
Pipeline::~Pipeline() {
@@ -159,7 +149,8 @@ void Pipeline::SetVolume(float volume) {
TimeDelta Pipeline::GetMediaTime() const {
base::AutoLock auto_lock(lock_);
- return std::min(interpolator_->GetInterpolatedTime(), duration_);
+ return renderer_ ? std::min(renderer_->GetMediaTime(), duration_)
+ : TimeDelta();
}
Ranges<TimeDelta> Pipeline::GetBufferedTimeRanges() const {
@@ -184,9 +175,16 @@ PipelineStatistics Pipeline::GetStatistics() const {
return statistics_;
}
+void Pipeline::DisableUnderflowForTesting() {
+ DCHECK(!renderer_);
+ underflow_disabled_for_testing_ = true;
+}
+
void Pipeline::SetTimeDeltaInterpolatorForTesting(
TimeDeltaInterpolator* interpolator) {
- interpolator_.reset(interpolator);
+ DCHECK(!renderer_);
+ DCHECK(interpolator);
+ test_interpolator_ = interpolator;
}
void Pipeline::SetErrorForTesting(PipelineStatus status) {
@@ -206,8 +204,7 @@ const char* Pipeline::GetStateString(State state) {
switch (state) {
RETURN_STRING(kCreated);
RETURN_STRING(kInitDemuxer);
- RETURN_STRING(kInitAudioRenderer);
- RETURN_STRING(kInitVideoRenderer);
+ RETURN_STRING(kInitRenderer);
RETURN_STRING(kSeeking);
RETURN_STRING(kPlaying);
RETURN_STRING(kStopping);
@@ -231,20 +228,13 @@ Pipeline::State Pipeline::GetNextState() const {
return kInitDemuxer;
case kInitDemuxer:
- if (demuxer_->GetStream(DemuxerStream::AUDIO))
- return kInitAudioRenderer;
- if (demuxer_->GetStream(DemuxerStream::VIDEO))
- return kInitVideoRenderer;
- return kPlaying;
-
- case kInitAudioRenderer:
- if (demuxer_->GetStream(DemuxerStream::VIDEO))
- return kInitVideoRenderer;
- return kPlaying;
-
- case kInitVideoRenderer:
+ if (demuxer_->GetStream(DemuxerStream::AUDIO) ||
+ demuxer_->GetStream(DemuxerStream::VIDEO)) {
+ return kInitRenderer;
+ }
return kPlaying;
+ case kInitRenderer:
case kSeeking:
return kPlaying;
@@ -285,37 +275,6 @@ void Pipeline::SetError(PipelineStatus error) {
media_log_->AddEvent(media_log_->CreatePipelineErrorEvent(error));
}
-void Pipeline::OnAudioTimeUpdate(TimeDelta time, TimeDelta max_time) {
- DCHECK(task_runner_->BelongsToCurrentThread());
- DCHECK_LE(time.InMicroseconds(), max_time.InMicroseconds());
- base::AutoLock auto_lock(lock_);
-
- if (interpolation_state_ == INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE &&
- time < interpolator_->GetInterpolatedTime()) {
- return;
- }
-
- if (state_ == kSeeking)
- return;
-
- interpolator_->SetBounds(time, max_time);
- StartClockIfWaitingForTimeUpdate_Locked();
-}
-
-void Pipeline::OnVideoTimeUpdate(TimeDelta max_time) {
- DCHECK(task_runner_->BelongsToCurrentThread());
-
- if (audio_renderer_)
- return;
-
- if (state_ == kSeeking)
- return;
-
- base::AutoLock auto_lock(lock_);
- DCHECK_NE(interpolation_state_, INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE);
- interpolator_->SetUpperBound(max_time);
-}
-
void Pipeline::SetDuration(TimeDelta duration) {
DCHECK(IsRunning());
media_log_->AddEvent(
@@ -366,29 +325,24 @@ void Pipeline::StateTransitionTask(PipelineStatus status) {
case kInitDemuxer:
return InitializeDemuxer(done_cb);
- case kInitAudioRenderer:
- return InitializeAudioRenderer(done_cb);
-
- case kInitVideoRenderer:
- return InitializeVideoRenderer(done_cb);
+ case kInitRenderer:
+ return InitializeRenderer(done_cb);
case kPlaying:
// Finish initial start sequence the first time we enter the playing
// state.
- if (filter_collection_) {
- filter_collection_.reset();
- if (!audio_renderer_ && !video_renderer_) {
+ if (!is_initialized_) {
+ if (!renderer_) {
ErrorChangedTask(PIPELINE_ERROR_COULD_NOT_RENDER);
return;
}
- if (audio_renderer_)
- time_source_ = audio_renderer_->GetTimeSource();
+ is_initialized_ = true;
{
PipelineMetadata metadata;
- metadata.has_audio = audio_renderer_;
- metadata.has_video = video_renderer_;
+ metadata.has_audio = renderer_->HasAudio();
+ metadata.has_video = renderer_->HasVideo();
metadata.timeline_offset = demuxer_->GetTimelineOffset();
DemuxerStream* stream = demuxer_->GetStream(DemuxerStream::VIDEO);
if (stream) {
@@ -402,17 +356,8 @@ void Pipeline::StateTransitionTask(PipelineStatus status) {
base::ResetAndReturn(&seek_cb_).Run(PIPELINE_OK);
- {
- base::AutoLock auto_lock(lock_);
- interpolator_->SetBounds(start_timestamp_, start_timestamp_);
- }
-
- if (time_source_)
- time_source_->SetMediaTime(start_timestamp_);
- if (audio_renderer_)
- audio_renderer_->StartPlaying();
- if (video_renderer_)
- video_renderer_->StartPlaying();
+ if (renderer_)
+ renderer_->StartPlayingFrom(start_timestamp_);
if (text_renderer_)
text_renderer_->StartPlaying();
@@ -429,31 +374,18 @@ void Pipeline::StateTransitionTask(PipelineStatus status) {
}
}
-// Note that the usage of base::Unretained() with the audio/video renderers
-// in the following DoXXX() functions is considered safe as they are owned by
+// Note that the usage of base::Unretained() with the renderers in the following
+// DoXXX() functions is considered safe as they are owned by
// |pending_callbacks_| and share the same lifetime.
//
// That being said, deleting the renderers while keeping |pending_callbacks_|
// running on the media thread would result in crashes.
-#if DCHECK_IS_ON
-static void VerifyBufferingStates(BufferingState* audio_buffering_state,
- BufferingState* video_buffering_state) {
- DCHECK_EQ(*audio_buffering_state, BUFFERING_HAVE_NOTHING);
- DCHECK_EQ(*video_buffering_state, BUFFERING_HAVE_NOTHING);
-}
-#endif
-
-void Pipeline::DoSeek(
- base::TimeDelta seek_timestamp,
- const PipelineStatusCB& done_cb) {
+void Pipeline::DoSeek(TimeDelta seek_timestamp,
+ const PipelineStatusCB& done_cb) {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(!pending_callbacks_.get());
SerialRunner::Queue bound_fns;
- {
- base::AutoLock auto_lock(lock_);
- PauseClockAndStopTicking_Locked();
- }
// Pause.
if (text_renderer_) {
@@ -462,23 +394,11 @@ void Pipeline::DoSeek(
}
// Flush.
- if (audio_renderer_) {
- bound_fns.Push(base::Bind(
- &AudioRenderer::Flush, base::Unretained(audio_renderer_.get())));
- }
-
- if (video_renderer_) {
+ if (renderer_) {
bound_fns.Push(base::Bind(
- &VideoRenderer::Flush, base::Unretained(video_renderer_.get())));
+ &Renderer::Flush, base::Unretained(renderer_.get())));
}
-#if DCHECK_IS_ON
- // Verify renderers reset their buffering states.
- bound_fns.Push(base::Bind(&VerifyBufferingStates,
- &audio_buffering_state_,
- &video_buffering_state_));
-#endif
-
if (text_renderer_) {
bound_fns.Push(base::Bind(
&TextRenderer::Flush, base::Unretained(text_renderer_.get())));
@@ -495,8 +415,7 @@ void Pipeline::DoStop(const PipelineStatusCB& done_cb) {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(!pending_callbacks_.get());
- audio_renderer_.reset();
- video_renderer_.reset();
+ renderer_.reset();
text_renderer_.reset();
if (demuxer_) {
@@ -510,9 +429,9 @@ void Pipeline::DoStop(const PipelineStatusCB& done_cb) {
void Pipeline::OnStopCompleted(PipelineStatus status) {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, kStopping);
- DCHECK(!audio_renderer_);
- DCHECK(!video_renderer_);
+ DCHECK(!renderer_);
DCHECK(!text_renderer_);
+
{
base::AutoLock l(lock_);
running_ = false;
@@ -543,35 +462,14 @@ void Pipeline::OnStopCompleted(PipelineStatus status) {
}
}
-void Pipeline::AddBufferedTimeRange(base::TimeDelta start,
- base::TimeDelta end) {
+void Pipeline::AddBufferedTimeRange(TimeDelta start,
+ TimeDelta end) {
DCHECK(IsRunning());
base::AutoLock auto_lock(lock_);
buffered_time_ranges_.Add(start, end);
did_loading_progress_ = true;
}
-void Pipeline::OnAudioRendererEnded() {
- // Force post to process ended tasks after current execution frame.
- task_runner_->PostTask(FROM_HERE, base::Bind(
- &Pipeline::DoAudioRendererEnded, base::Unretained(this)));
- media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::AUDIO_ENDED));
-}
-
-void Pipeline::OnVideoRendererEnded() {
- // Force post to process ended tasks after current execution frame.
- task_runner_->PostTask(FROM_HERE, base::Bind(
- &Pipeline::DoVideoRendererEnded, base::Unretained(this)));
- media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::VIDEO_ENDED));
-}
-
-void Pipeline::OnTextRendererEnded() {
- // Force post to process ended messages after current execution frame.
- task_runner_->PostTask(FROM_HERE, base::Bind(
- &Pipeline::DoTextRendererEnded, base::Unretained(this)));
- media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::TEXT_ENDED));
-}
-
// Called from any thread.
void Pipeline::OnUpdateStatistics(const PipelineStatistics& stats) {
base::AutoLock auto_lock(lock_);
@@ -637,13 +535,8 @@ void Pipeline::PlaybackRateChangedTask(float playback_rate) {
if (state_ != kPlaying)
return;
- {
- base::AutoLock auto_lock(lock_);
- interpolator_->SetPlaybackRate(playback_rate);
- }
-
- if (time_source_)
- time_source_->SetPlaybackRate(playback_rate_);
+ if (renderer_)
+ renderer_->SetPlaybackRate(playback_rate_);
}
void Pipeline::VolumeChangedTask(float volume) {
@@ -653,8 +546,8 @@ void Pipeline::VolumeChangedTask(float volume) {
if (state_ != kPlaying)
return;
- if (audio_renderer_)
- audio_renderer_->SetVolume(volume);
+ if (renderer_)
+ renderer_->SetVolume(volume);
}
void Pipeline::SeekTask(TimeDelta time, const PipelineStatusCB& seek_cb) {
@@ -677,8 +570,7 @@ void Pipeline::SeekTask(TimeDelta time, const PipelineStatusCB& seek_cb) {
SetState(kSeeking);
seek_cb_ = seek_cb;
- audio_ended_ = false;
- video_ended_ = false;
+ ended_ = false;
text_ended_ = false;
start_timestamp_ = time;
@@ -686,39 +578,22 @@ void Pipeline::SeekTask(TimeDelta time, const PipelineStatusCB& seek_cb) {
&Pipeline::OnStateTransition, base::Unretained(this)));
}
-void Pipeline::DoAudioRendererEnded() {
+void Pipeline::OnRendererEnded() {
DCHECK(task_runner_->BelongsToCurrentThread());
+ media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::ENDED));
if (state_ != kPlaying)
return;
- DCHECK(!audio_ended_);
- audio_ended_ = true;
-
- // Start clock since there is no more audio to trigger clock updates.
- {
- base::AutoLock auto_lock(lock_);
- interpolator_->SetUpperBound(duration_);
- StartClockIfWaitingForTimeUpdate_Locked();
- }
+ DCHECK(!ended_);
+ ended_ = true;
RunEndedCallbackIfNeeded();
}
-void Pipeline::DoVideoRendererEnded() {
- DCHECK(task_runner_->BelongsToCurrentThread());
-
- if (state_ != kPlaying)
- return;
-
- DCHECK(!video_ended_);
- video_ended_ = true;
-
- RunEndedCallbackIfNeeded();
-}
-
-void Pipeline::DoTextRendererEnded() {
+void Pipeline::OnTextRendererEnded() {
DCHECK(task_runner_->BelongsToCurrentThread());
+ media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::TEXT_ENDED));
if (state_ != kPlaying)
return;
@@ -732,21 +607,12 @@ void Pipeline::DoTextRendererEnded() {
void Pipeline::RunEndedCallbackIfNeeded() {
DCHECK(task_runner_->BelongsToCurrentThread());
- if (audio_renderer_ && !audio_ended_)
- return;
-
- if (video_renderer_ && !video_ended_)
+ if (renderer_ && !ended_)
return;
if (text_renderer_ && text_renderer_->HasTracks() && !text_ended_)
return;
- {
- base::AutoLock auto_lock(lock_);
- PauseClockAndStopTicking_Locked();
- interpolator_->SetBounds(duration_, duration_);
- }
-
DCHECK_EQ(status_, PIPELINE_OK);
ended_cb_.Run();
}
@@ -771,142 +637,33 @@ void Pipeline::InitializeDemuxer(const PipelineStatusCB& done_cb) {
demuxer_->Initialize(this, done_cb, text_renderer_);
}
-void Pipeline::InitializeAudioRenderer(const PipelineStatusCB& done_cb) {
+void Pipeline::InitializeRenderer(const PipelineStatusCB& done_cb) {
DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(demuxer_);
- audio_renderer_ = filter_collection_->GetAudioRenderer();
- audio_renderer_->Initialize(
- demuxer_->GetStream(DemuxerStream::AUDIO),
- done_cb,
- base::Bind(&Pipeline::OnUpdateStatistics, base::Unretained(this)),
- base::Bind(&Pipeline::OnAudioTimeUpdate, base::Unretained(this)),
- base::Bind(&Pipeline::BufferingStateChanged, base::Unretained(this),
- &audio_buffering_state_),
- base::Bind(&Pipeline::OnAudioRendererEnded, base::Unretained(this)),
- base::Bind(&Pipeline::SetError, base::Unretained(this)));
-}
+ renderer_.reset(new RendererImpl(
+ demuxer_,
+ filter_collection_.Pass(),
+ task_runner_,
+ base::Bind(&Pipeline::GetMediaDuration, base::Unretained(this))));
-void Pipeline::InitializeVideoRenderer(const PipelineStatusCB& done_cb) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ if (test_interpolator_)
+ renderer_->SetTimeDeltaInterpolatorForTesting(test_interpolator_);
+ if (underflow_disabled_for_testing_)
+ renderer_->DisableUnderflowForTesting();
- video_renderer_ = filter_collection_->GetVideoRenderer();
- video_renderer_->Initialize(
- demuxer_->GetStream(DemuxerStream::VIDEO),
- demuxer_->GetLiveness() == Demuxer::LIVENESS_LIVE,
+ renderer_->Initialize(
done_cb,
base::Bind(&Pipeline::OnUpdateStatistics, base::Unretained(this)),
- base::Bind(&Pipeline::OnVideoTimeUpdate, base::Unretained(this)),
- base::Bind(&Pipeline::BufferingStateChanged, base::Unretained(this),
- &video_buffering_state_),
- base::Bind(&Pipeline::OnVideoRendererEnded, base::Unretained(this)),
+ base::Bind(&Pipeline::OnRendererEnded, base::Unretained(this)),
base::Bind(&Pipeline::SetError, base::Unretained(this)),
- base::Bind(&Pipeline::GetMediaTime, base::Unretained(this)),
- base::Bind(&Pipeline::GetMediaDuration, base::Unretained(this)));
+ base::Bind(&Pipeline::BufferingStateChanged, base::Unretained(this)));
}
-void Pipeline::BufferingStateChanged(BufferingState* buffering_state,
- BufferingState new_buffering_state) {
- DVLOG(1) << __FUNCTION__ << "(" << *buffering_state << ", "
- << " " << new_buffering_state << ") "
- << (buffering_state == &audio_buffering_state_ ? "audio" : "video");
+void Pipeline::BufferingStateChanged(BufferingState new_buffering_state) {
+ DVLOG(1) << __FUNCTION__ << "(" << new_buffering_state << ") ";
DCHECK(task_runner_->BelongsToCurrentThread());
- bool was_waiting_for_enough_data = WaitingForEnoughData();
-
- *buffering_state = new_buffering_state;
-
- // Disable underflow by ignoring updates that renderers have ran out of data
- // after we have started the clock.
- if (state_ == kPlaying && underflow_disabled_for_testing_ &&
- interpolation_state_ != INTERPOLATION_STOPPED) {
- return;
- }
-
- // Renderer underflowed.
- if (!was_waiting_for_enough_data && WaitingForEnoughData()) {
- PausePlayback();
-
- // TODO(scherkus): Fire BUFFERING_HAVE_NOTHING callback to alert clients of
- // underflow state http://crbug.com/144683
- return;
- }
-
- // Renderer prerolled.
- if (was_waiting_for_enough_data && !WaitingForEnoughData()) {
- StartPlayback();
- buffering_state_cb_.Run(BUFFERING_HAVE_ENOUGH);
- return;
- }
-}
-
-bool Pipeline::WaitingForEnoughData() const {
- DCHECK(task_runner_->BelongsToCurrentThread());
- if (state_ != kPlaying)
- return false;
- if (audio_renderer_ && audio_buffering_state_ != BUFFERING_HAVE_ENOUGH)
- return true;
- if (video_renderer_ && video_buffering_state_ != BUFFERING_HAVE_ENOUGH)
- return true;
- return false;
-}
-
-void Pipeline::PausePlayback() {
- DVLOG(1) << __FUNCTION__;
- DCHECK_EQ(state_, kPlaying);
- DCHECK(WaitingForEnoughData());
- DCHECK(task_runner_->BelongsToCurrentThread());
-
- base::AutoLock auto_lock(lock_);
- PauseClockAndStopTicking_Locked();
-}
-
-void Pipeline::StartPlayback() {
- DVLOG(1) << __FUNCTION__;
- DCHECK_EQ(state_, kPlaying);
- DCHECK_EQ(interpolation_state_, INTERPOLATION_STOPPED);
- DCHECK(!WaitingForEnoughData());
- DCHECK(task_runner_->BelongsToCurrentThread());
-
- if (time_source_) {
- // We use audio stream to update the clock. So if there is such a
- // stream, we pause the clock until we receive a valid timestamp.
- base::AutoLock auto_lock(lock_);
- interpolation_state_ = INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE;
- time_source_->StartTicking();
- } else {
- base::AutoLock auto_lock(lock_);
- interpolation_state_ = INTERPOLATION_STARTED;
- interpolator_->SetUpperBound(duration_);
- interpolator_->StartInterpolating();
- }
-}
-
-void Pipeline::PauseClockAndStopTicking_Locked() {
- lock_.AssertAcquired();
- switch (interpolation_state_) {
- case INTERPOLATION_STOPPED:
- return;
-
- case INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE:
- time_source_->StopTicking();
- break;
-
- case INTERPOLATION_STARTED:
- if (time_source_)
- time_source_->StopTicking();
- interpolator_->StopInterpolating();
- break;
- }
-
- interpolation_state_ = INTERPOLATION_STOPPED;
-}
-
-void Pipeline::StartClockIfWaitingForTimeUpdate_Locked() {
- lock_.AssertAcquired();
- if (interpolation_state_ != INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE)
- return;
-
- interpolation_state_ = INTERPOLATION_STARTED;
- interpolator_->StartInterpolating();
+ buffering_state_cb_.Run(new_buffering_state);
}
} // namespace media
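
For readers following the refactor: below is a rough, hypothetical sketch of the Renderer API this patch programs against, reconstructed only from the call sites above (Initialize(), Flush(), StartPlayingFrom(), SetPlaybackRate(), SetVolume(), GetMediaTime(), HasAudio(), HasVideo()). Parameter names and callback types are assumptions, not copied from the tree; consult media/base/renderer.h for the real declaration.

// Hypothetical sketch only -- inferred from how Pipeline uses |renderer_| in
// this patch, not the actual media/base/renderer.h.
#include "base/callback.h"
#include "base/time/time.h"
#include "media/base/buffering_state.h"
#include "media/base/pipeline_status.h"

namespace media {

class Renderer {
 public:
  virtual ~Renderer() {}

  // Mirrors the binds in Pipeline::InitializeRenderer(): completion,
  // statistics, ended, error, and buffering-state callbacks, in that order.
  virtual void Initialize(const PipelineStatusCB& init_cb,
                          const StatisticsCB& statistics_cb,
                          const base::Closure& ended_cb,
                          const PipelineStatusCB& error_cb,
                          const BufferingStateCB& buffering_state_cb) = 0;

  // Queued through SerialRunner in Pipeline::DoSeek(); assumed to take a
  // completion closure like the old AudioRenderer/VideoRenderer::Flush().
  virtual void Flush(const base::Closure& flush_cb) = 0;

  // Replaces the TimeSource::SetMediaTime() + StartPlaying() sequence removed
  // from the kPlaying transition.
  virtual void StartPlayingFrom(base::TimeDelta time) = 0;

  virtual void SetPlaybackRate(float playback_rate) = 0;
  virtual void SetVolume(float volume) = 0;

  // Pipeline::GetMediaTime() clamps this against the media duration.
  virtual base::TimeDelta GetMediaTime() = 0;

  virtual bool HasAudio() = 0;
  virtual bool HasVideo() = 0;
};

}  // namespace media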