| Index: media/base/pipeline.cc
|
| diff --git a/media/base/pipeline.cc b/media/base/pipeline.cc
|
| index 41eeb98d8a1dc85fb8e845815f2a60cf8372451e..bc55981f6cfbdbaf96e29f2d1c50da616d6b624a 100644
|
| --- a/media/base/pipeline.cc
|
| +++ b/media/base/pipeline.cc
|
| @@ -17,12 +17,17 @@
|
| #include "base/strings/string_number_conversions.h"
|
| #include "base/strings/string_util.h"
|
| #include "base/synchronization/condition_variable.h"
|
| +#include "media/base/audio_decoder.h"
|
| +#include "media/base/audio_renderer.h"
|
| #include "media/base/filter_collection.h"
|
| #include "media/base/media_log.h"
|
| -#include "media/base/renderer.h"
|
| #include "media/base/text_renderer.h"
|
| #include "media/base/text_track_config.h"
|
| +#include "media/base/time_delta_interpolator.h"
|
| +#include "media/base/time_source.h"
|
| +#include "media/base/video_decoder.h"
|
| #include "media/base/video_decoder_config.h"
|
| +#include "media/base/video_renderer.h"
|
|
|
| using base::TimeDelta;
|
|
|
| @@ -37,16 +42,23 @@
|
| did_loading_progress_(false),
|
| volume_(1.0f),
|
| playback_rate_(0.0f),
|
| + interpolator_(new TimeDeltaInterpolator(&default_tick_clock_)),
|
| + interpolation_state_(INTERPOLATION_STOPPED),
|
| status_(PIPELINE_OK),
|
| - is_initialized_(false),
|
| state_(kCreated),
|
| - renderer_ended_(false),
|
| - text_renderer_ended_(false),
|
| + audio_ended_(false),
|
| + video_ended_(false),
|
| + text_ended_(false),
|
| + audio_buffering_state_(BUFFERING_HAVE_NOTHING),
|
| + video_buffering_state_(BUFFERING_HAVE_NOTHING),
|
| demuxer_(NULL),
|
| + time_source_(NULL),
|
| + underflow_disabled_for_testing_(false),
|
| weak_factory_(this) {
|
| media_log_->AddEvent(media_log_->CreatePipelineStateChangedEvent(kCreated));
|
| media_log_->AddEvent(
|
| media_log_->CreateEvent(MediaLogEvent::PIPELINE_CREATED));
|
| + interpolator_->SetBounds(base::TimeDelta(), base::TimeDelta());
|
| }
|
|
|
| Pipeline::~Pipeline() {
|
| @@ -154,8 +166,7 @@
|
|
|
| TimeDelta Pipeline::GetMediaTime() const {
|
| base::AutoLock auto_lock(lock_);
|
| - return renderer_ ? std::min(renderer_->GetMediaTime(), duration_)
|
| - : TimeDelta();
|
| + return std::min(interpolator_->GetInterpolatedTime(), duration_);
|
| }
|
|
|
| Ranges<TimeDelta> Pipeline::GetBufferedTimeRanges() const {
|
| @@ -178,6 +189,11 @@
|
| PipelineStatistics Pipeline::GetStatistics() const {
|
| base::AutoLock auto_lock(lock_);
|
| return statistics_;
|
| +}
|
| +
|
| +void Pipeline::SetTimeDeltaInterpolatorForTesting(
|
| + TimeDeltaInterpolator* interpolator) {
|
| + interpolator_.reset(interpolator);
|
| }
|
|
|
| void Pipeline::SetErrorForTesting(PipelineStatus status) {
|
| @@ -202,7 +218,8 @@
|
| switch (state) {
|
| RETURN_STRING(kCreated);
|
| RETURN_STRING(kInitDemuxer);
|
| - RETURN_STRING(kInitRenderer);
|
| + RETURN_STRING(kInitAudioRenderer);
|
| + RETURN_STRING(kInitVideoRenderer);
|
| RETURN_STRING(kSeeking);
|
| RETURN_STRING(kPlaying);
|
| RETURN_STRING(kStopping);
|
| @@ -226,13 +243,20 @@
|
| return kInitDemuxer;
|
|
|
| case kInitDemuxer:
|
| - if (demuxer_->GetStream(DemuxerStream::AUDIO) ||
|
| - demuxer_->GetStream(DemuxerStream::VIDEO)) {
|
| - return kInitRenderer;
|
| - }
|
| + if (demuxer_->GetStream(DemuxerStream::AUDIO))
|
| + return kInitAudioRenderer;
|
| + if (demuxer_->GetStream(DemuxerStream::VIDEO))
|
| + return kInitVideoRenderer;
|
| return kPlaying;
|
|
|
| - case kInitRenderer:
|
| + case kInitAudioRenderer:
|
| + if (demuxer_->GetStream(DemuxerStream::VIDEO))
|
| + return kInitVideoRenderer;
|
| + return kPlaying;
|
| +
|
| + case kInitVideoRenderer:
|
| + return kPlaying;
|
| +
|
| case kSeeking:
|
| return kPlaying;
|
|
|
| @@ -278,6 +302,37 @@
|
| &Pipeline::ErrorChangedTask, weak_factory_.GetWeakPtr(), error));
|
| }
|
|
|
| +void Pipeline::OnAudioTimeUpdate(TimeDelta time, TimeDelta max_time) {
|
| + DCHECK(task_runner_->BelongsToCurrentThread());
|
| + DCHECK_LE(time.InMicroseconds(), max_time.InMicroseconds());
|
| + base::AutoLock auto_lock(lock_);
|
| +
|
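| +  // Ignore audio time updates that are behind the interpolated time while we
|
| +  // are still waiting for a valid update to start the clock from.
|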
| + if (interpolation_state_ == INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE &&
|
| + time < interpolator_->GetInterpolatedTime()) {
|
| + return;
|
| + }
|
| +
|
| + if (state_ == kSeeking)
|
| + return;
|
| +
|
| + interpolator_->SetBounds(time, max_time);
|
| + StartClockIfWaitingForTimeUpdate_Locked();
|
| +}
|
| +
|
| +void Pipeline::OnVideoTimeUpdate(TimeDelta max_time) {
|
| + DCHECK(task_runner_->BelongsToCurrentThread());
|
| +
|
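| +  // When an audio renderer exists, audio time updates drive the clock.
|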
| + if (audio_renderer_)
|
| + return;
|
| +
|
| + if (state_ == kSeeking)
|
| + return;
|
| +
|
| + base::AutoLock auto_lock(lock_);
|
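| +  // Without audio there is never a pending audio time update to wait on.
|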
| + DCHECK_NE(interpolation_state_, INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE);
|
| + interpolator_->SetUpperBound(max_time);
|
| +}
|
| +
|
| void Pipeline::SetDuration(TimeDelta duration) {
|
| DCHECK(IsRunning());
|
| media_log_->AddEvent(
|
| @@ -331,24 +386,29 @@
|
| case kInitDemuxer:
|
| return InitializeDemuxer(done_cb);
|
|
|
| - case kInitRenderer:
|
| - return InitializeRenderer(done_cb);
|
| + case kInitAudioRenderer:
|
| + return InitializeAudioRenderer(done_cb);
|
| +
|
| + case kInitVideoRenderer:
|
| + return InitializeVideoRenderer(done_cb);
|
|
|
| case kPlaying:
|
| // Finish initial start sequence the first time we enter the playing
|
| // state.
|
| - if (!is_initialized_) {
|
| - if (!renderer_) {
|
| + if (filter_collection_) {
|
| + filter_collection_.reset();
|
| + if (!audio_renderer_ && !video_renderer_) {
|
| ErrorChangedTask(PIPELINE_ERROR_COULD_NOT_RENDER);
|
| return;
|
| }
|
|
|
| - is_initialized_ = true;
|
| + if (audio_renderer_)
|
| + time_source_ = audio_renderer_->GetTimeSource();
|
|
|
| {
|
| PipelineMetadata metadata;
|
| - metadata.has_audio = renderer_->HasAudio();
|
| - metadata.has_video = renderer_->HasVideo();
|
| + metadata.has_audio = audio_renderer_;
|
| + metadata.has_video = video_renderer_;
|
| metadata.timeline_offset = demuxer_->GetTimelineOffset();
|
| DemuxerStream* stream = demuxer_->GetStream(DemuxerStream::VIDEO);
|
| if (stream) {
|
| @@ -362,8 +422,17 @@
|
|
|
| base::ResetAndReturn(&seek_cb_).Run(PIPELINE_OK);
|
|
|
| - renderer_->StartPlayingFrom(start_timestamp_);
|
| -
|
| + {
|
| + base::AutoLock auto_lock(lock_);
|
| + interpolator_->SetBounds(start_timestamp_, start_timestamp_);
|
| + }
|
| +
|
| + if (time_source_)
|
| + time_source_->SetMediaTime(start_timestamp_);
|
| + if (audio_renderer_)
|
| + audio_renderer_->StartPlaying();
|
| + if (video_renderer_)
|
| + video_renderer_->StartPlaying();
|
| if (text_renderer_)
|
| text_renderer_->StartPlaying();
|
|
|
| @@ -380,17 +449,31 @@
|
| }
|
| }
|
|
|
| -// Note that the usage of base::Unretained() with the renderers is considered
|
| -// safe as they are owned by |pending_callbacks_| and share the same lifetime.
|
| +// Note that the usage of base::Unretained() with the audio/video renderers
|
| +// in the following DoXXX() functions is considered safe as they are owned by
|
| +// |pending_callbacks_| and share the same lifetime.
|
| //
|
| // That being said, deleting the renderers while keeping |pending_callbacks_|
|
| // running on the media thread would result in crashes.
|
| -void Pipeline::DoSeek(TimeDelta seek_timestamp,
|
| - const PipelineStatusCB& done_cb) {
|
| +
|
| +#if DCHECK_IS_ON
|
| +static void VerifyBufferingStates(BufferingState* audio_buffering_state,
|
| + BufferingState* video_buffering_state) {
|
| + DCHECK_EQ(*audio_buffering_state, BUFFERING_HAVE_NOTHING);
|
| + DCHECK_EQ(*video_buffering_state, BUFFERING_HAVE_NOTHING);
|
| +}
|
| +#endif
|
| +
|
| +void Pipeline::DoSeek(
|
| + base::TimeDelta seek_timestamp,
|
| + const PipelineStatusCB& done_cb) {
|
| DCHECK(task_runner_->BelongsToCurrentThread());
|
| DCHECK(!pending_callbacks_.get());
|
| - DCHECK_EQ(state_, kSeeking);
|
| SerialRunner::Queue bound_fns;
|
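| +  // Stop the clock before the renderers are paused and flushed.
|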
| + {
|
| + base::AutoLock auto_lock(lock_);
|
| + PauseClockAndStopTicking_Locked();
|
| + }
|
|
|
| // Pause.
|
| if (text_renderer_) {
|
| @@ -399,9 +482,22 @@
|
| }
|
|
|
| // Flush.
|
| - DCHECK(renderer_);
|
| - bound_fns.Push(
|
| - base::Bind(&Renderer::Flush, base::Unretained(renderer_.get())));
|
| + if (audio_renderer_) {
|
| + bound_fns.Push(base::Bind(
|
| + &AudioRenderer::Flush, base::Unretained(audio_renderer_.get())));
|
| + }
|
| +
|
| + if (video_renderer_) {
|
| + bound_fns.Push(base::Bind(
|
| + &VideoRenderer::Flush, base::Unretained(video_renderer_.get())));
|
| + }
|
| +
|
| +#if DCHECK_IS_ON
|
| + // Verify renderers reset their buffering states.
|
| + bound_fns.Push(base::Bind(&VerifyBufferingStates,
|
| + &audio_buffering_state_,
|
| + &video_buffering_state_));
|
| +#endif
|
|
|
| if (text_renderer_) {
|
| bound_fns.Push(base::Bind(
|
| @@ -420,7 +516,8 @@
|
| DCHECK(task_runner_->BelongsToCurrentThread());
|
| DCHECK(!pending_callbacks_.get());
|
|
|
| - renderer_.reset();
|
| + audio_renderer_.reset();
|
| + video_renderer_.reset();
|
| text_renderer_.reset();
|
|
|
| if (demuxer_) {
|
| @@ -435,9 +532,9 @@
|
| DVLOG(2) << __FUNCTION__;
|
| DCHECK(task_runner_->BelongsToCurrentThread());
|
| DCHECK_EQ(state_, kStopping);
|
| - DCHECK(!renderer_);
|
| + DCHECK(!audio_renderer_);
|
| + DCHECK(!video_renderer_);
|
| DCHECK(!text_renderer_);
|
| -
|
| {
|
| base::AutoLock l(lock_);
|
| running_ = false;
|
| @@ -472,7 +569,8 @@
|
| }
|
| }
|
|
|
| -void Pipeline::AddBufferedTimeRange(TimeDelta start, TimeDelta end) {
|
| +void Pipeline::AddBufferedTimeRange(base::TimeDelta start,
|
| + base::TimeDelta end) {
|
| DCHECK(IsRunning());
|
| base::AutoLock auto_lock(lock_);
|
| buffered_time_ranges_.Add(start, end);
|
| @@ -554,7 +652,13 @@
|
| if (state_ != kPlaying)
|
| return;
|
|
|
| - renderer_->SetPlaybackRate(playback_rate_);
|
| + {
|
| + base::AutoLock auto_lock(lock_);
|
| + interpolator_->SetPlaybackRate(playback_rate);
|
| + }
|
| +
|
| + if (time_source_)
|
| + time_source_->SetPlaybackRate(playback_rate_);
|
| }
|
|
|
| void Pipeline::VolumeChangedTask(float volume) {
|
| @@ -564,7 +668,8 @@
|
| if (state_ != kPlaying)
|
| return;
|
|
|
| - renderer_->SetVolume(volume);
|
| + if (audio_renderer_)
|
| + audio_renderer_->SetVolume(volume);
|
| }
|
|
|
| void Pipeline::SeekTask(TimeDelta time, const PipelineStatusCB& seek_cb) {
|
| @@ -587,27 +692,48 @@
|
|
|
| SetState(kSeeking);
|
| seek_cb_ = seek_cb;
|
| - renderer_ended_ = false;
|
| - text_renderer_ended_ = false;
|
| + audio_ended_ = false;
|
| + video_ended_ = false;
|
| + text_ended_ = false;
|
| start_timestamp_ = time;
|
|
|
| DoSeek(time,
|
| base::Bind(&Pipeline::OnStateTransition, weak_factory_.GetWeakPtr()));
|
| }
|
|
|
| -void Pipeline::OnRendererEnded() {
|
| - DCHECK(task_runner_->BelongsToCurrentThread());
|
| - media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::ENDED));
|
| +void Pipeline::OnAudioRendererEnded() {
|
| + DCHECK(task_runner_->BelongsToCurrentThread());
|
| + media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::AUDIO_ENDED));
|
|
|
| if (state_ != kPlaying)
|
| return;
|
|
|
| - DCHECK(!renderer_ended_);
|
| - renderer_ended_ = true;
|
| + DCHECK(!audio_ended_);
|
| + audio_ended_ = true;
|
| +
|
| + // Start clock since there is no more audio to trigger clock updates.
|
| + {
|
| + base::AutoLock auto_lock(lock_);
|
| + interpolator_->SetUpperBound(duration_);
|
| + StartClockIfWaitingForTimeUpdate_Locked();
|
| + }
|
|
|
| RunEndedCallbackIfNeeded();
|
| }
|
|
|
| +void Pipeline::OnVideoRendererEnded() {
|
| + DCHECK(task_runner_->BelongsToCurrentThread());
|
| + media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::VIDEO_ENDED));
|
| +
|
| + if (state_ != kPlaying)
|
| + return;
|
| +
|
| + DCHECK(!video_ended_);
|
| + video_ended_ = true;
|
| +
|
| + RunEndedCallbackIfNeeded();
|
| +}
|
| +
|
| void Pipeline::OnTextRendererEnded() {
|
| DCHECK(task_runner_->BelongsToCurrentThread());
|
| media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::TEXT_ENDED));
|
| @@ -615,8 +741,8 @@
|
| if (state_ != kPlaying)
|
| return;
|
|
|
| - DCHECK(!text_renderer_ended_);
|
| - text_renderer_ended_ = true;
|
| + DCHECK(!text_ended_);
|
| + text_ended_ = true;
|
|
|
| RunEndedCallbackIfNeeded();
|
| }
|
| @@ -624,11 +750,20 @@
|
| void Pipeline::RunEndedCallbackIfNeeded() {
|
| DCHECK(task_runner_->BelongsToCurrentThread());
|
|
|
| - if (renderer_ && !renderer_ended_)
|
| - return;
|
| -
|
| - if (text_renderer_ && text_renderer_->HasTracks() && !text_renderer_ended_)
|
| - return;
|
| + if (audio_renderer_ && !audio_ended_)
|
| + return;
|
| +
|
| + if (video_renderer_ && !video_ended_)
|
| + return;
|
| +
|
| + if (text_renderer_ && text_renderer_->HasTracks() && !text_ended_)
|
| + return;
|
| +
|
| + {
|
| + base::AutoLock auto_lock(lock_);
|
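| +    // All renderers have ended; stop the clock and pin it to the duration.
|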
| + PauseClockAndStopTicking_Locked();
|
| + interpolator_->SetBounds(duration_, duration_);
|
| + }
|
|
|
| DCHECK_EQ(status_, PIPELINE_OK);
|
| ended_cb_.Run();
|
| @@ -654,25 +789,144 @@
|
| demuxer_->Initialize(this, done_cb, text_renderer_);
|
| }
|
|
|
| -void Pipeline::InitializeRenderer(const PipelineStatusCB& done_cb) {
|
| - DCHECK(task_runner_->BelongsToCurrentThread());
|
| -
|
| - renderer_ = filter_collection_->GetRenderer();
|
| -
|
| +void Pipeline::InitializeAudioRenderer(const PipelineStatusCB& done_cb) {
|
| + DCHECK(task_runner_->BelongsToCurrentThread());
|
| +
|
| + audio_renderer_ = filter_collection_->GetAudioRenderer();
|
| base::WeakPtr<Pipeline> weak_this = weak_factory_.GetWeakPtr();
|
| - renderer_->Initialize(
|
| + audio_renderer_->Initialize(
|
| + demuxer_->GetStream(DemuxerStream::AUDIO),
|
| done_cb,
|
| base::Bind(&Pipeline::OnUpdateStatistics, weak_this),
|
| - base::Bind(&Pipeline::OnRendererEnded, weak_this),
|
| + base::Bind(&Pipeline::OnAudioTimeUpdate, weak_this),
|
| + base::Bind(&Pipeline::BufferingStateChanged, weak_this,
|
| + &audio_buffering_state_),
|
| + base::Bind(&Pipeline::OnAudioRendererEnded, weak_this),
|
| + base::Bind(&Pipeline::OnError, weak_this));
|
| +}
|
| +
|
| +void Pipeline::InitializeVideoRenderer(const PipelineStatusCB& done_cb) {
|
| + DCHECK(task_runner_->BelongsToCurrentThread());
|
| +
|
| + video_renderer_ = filter_collection_->GetVideoRenderer();
|
| + base::WeakPtr<Pipeline> weak_this = weak_factory_.GetWeakPtr();
|
| + video_renderer_->Initialize(
|
| + demuxer_->GetStream(DemuxerStream::VIDEO),
|
| + demuxer_->GetLiveness() == Demuxer::LIVENESS_LIVE,
|
| + done_cb,
|
| + base::Bind(&Pipeline::OnUpdateStatistics, weak_this),
|
| + base::Bind(&Pipeline::OnVideoTimeUpdate, weak_this),
|
| + base::Bind(&Pipeline::BufferingStateChanged, weak_this,
|
| + &video_buffering_state_),
|
| + base::Bind(&Pipeline::OnVideoRendererEnded, weak_this),
|
| base::Bind(&Pipeline::OnError, weak_this),
|
| - base::Bind(&Pipeline::BufferingStateChanged, weak_this),
|
| + base::Bind(&Pipeline::GetMediaTime, base::Unretained(this)),
|
| base::Bind(&Pipeline::GetMediaDuration, base::Unretained(this)));
|
| }
|
|
|
| -void Pipeline::BufferingStateChanged(BufferingState new_buffering_state) {
|
| - DVLOG(1) << __FUNCTION__ << "(" << new_buffering_state << ") ";
|
| - DCHECK(task_runner_->BelongsToCurrentThread());
|
| - buffering_state_cb_.Run(new_buffering_state);
|
| +void Pipeline::BufferingStateChanged(BufferingState* buffering_state,
|
| + BufferingState new_buffering_state) {
|
| + DVLOG(1) << __FUNCTION__ << "(" << *buffering_state << ", "
|
| +           << new_buffering_state << ") "
|
| + << (buffering_state == &audio_buffering_state_ ? "audio" : "video");
|
| + DCHECK(task_runner_->BelongsToCurrentThread());
|
| + bool was_waiting_for_enough_data = WaitingForEnoughData();
|
| +
|
| + *buffering_state = new_buffering_state;
|
| +
|
| +  // Disable underflow by ignoring updates indicating that renderers have run
|
| +  // out of data after we have started the clock.
|
| + if (state_ == kPlaying && underflow_disabled_for_testing_ &&
|
| + interpolation_state_ != INTERPOLATION_STOPPED) {
|
| + return;
|
| + }
|
| +
|
| + // Renderer underflowed.
|
| + if (!was_waiting_for_enough_data && WaitingForEnoughData()) {
|
| + PausePlayback();
|
| +
|
| + // TODO(scherkus): Fire BUFFERING_HAVE_NOTHING callback to alert clients of
|
| + // underflow state http://crbug.com/144683
|
| + return;
|
| + }
|
| +
|
| + // Renderer prerolled.
|
| + if (was_waiting_for_enough_data && !WaitingForEnoughData()) {
|
| + StartPlayback();
|
| + buffering_state_cb_.Run(BUFFERING_HAVE_ENOUGH);
|
| + return;
|
| + }
|
| +}
|
| +
|
| +bool Pipeline::WaitingForEnoughData() const {
|
| + DCHECK(task_runner_->BelongsToCurrentThread());
|
| + if (state_ != kPlaying)
|
| + return false;
|
| + if (audio_renderer_ && audio_buffering_state_ != BUFFERING_HAVE_ENOUGH)
|
| + return true;
|
| + if (video_renderer_ && video_buffering_state_ != BUFFERING_HAVE_ENOUGH)
|
| + return true;
|
| + return false;
|
| +}
|
| +
|
| +void Pipeline::PausePlayback() {
|
| + DVLOG(1) << __FUNCTION__;
|
| + DCHECK_EQ(state_, kPlaying);
|
| + DCHECK(WaitingForEnoughData());
|
| + DCHECK(task_runner_->BelongsToCurrentThread());
|
| +
|
| + base::AutoLock auto_lock(lock_);
|
| + PauseClockAndStopTicking_Locked();
|
| +}
|
| +
|
| +void Pipeline::StartPlayback() {
|
| + DVLOG(1) << __FUNCTION__;
|
| + DCHECK_EQ(state_, kPlaying);
|
| + DCHECK_EQ(interpolation_state_, INTERPOLATION_STOPPED);
|
| + DCHECK(!WaitingForEnoughData());
|
| + DCHECK(task_runner_->BelongsToCurrentThread());
|
| +
|
| + if (time_source_) {
|
| +    // The audio stream is used to update the clock, so if one exists we keep
|
| +    // the clock paused until we receive a valid audio timestamp.
|
| + base::AutoLock auto_lock(lock_);
|
| + interpolation_state_ = INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE;
|
| + time_source_->StartTicking();
|
| + } else {
|
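| +    // No audio stream, so start the interpolator immediately; video time
|
| +    // updates will adjust its upper bound.
|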
| + base::AutoLock auto_lock(lock_);
|
| + interpolation_state_ = INTERPOLATION_STARTED;
|
| + interpolator_->SetUpperBound(duration_);
|
| + interpolator_->StartInterpolating();
|
| + }
|
| +}
|
| +
|
| +void Pipeline::PauseClockAndStopTicking_Locked() {
|
| + lock_.AssertAcquired();
|
| + switch (interpolation_state_) {
|
| + case INTERPOLATION_STOPPED:
|
| + return;
|
| +
|
| + case INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE:
|
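| +      // The interpolator was never started in this state, so only the time
|
| +      // source needs to be stopped.
|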
| + time_source_->StopTicking();
|
| + break;
|
| +
|
| + case INTERPOLATION_STARTED:
|
| + if (time_source_)
|
| + time_source_->StopTicking();
|
| + interpolator_->StopInterpolating();
|
| + break;
|
| + }
|
| +
|
| + interpolation_state_ = INTERPOLATION_STOPPED;
|
| +}
|
| +
|
| +void Pipeline::StartClockIfWaitingForTimeUpdate_Locked() {
|
| + lock_.AssertAcquired();
|
| + if (interpolation_state_ != INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE)
|
| + return;
|
| +
|
| + interpolation_state_ = INTERPOLATION_STARTED;
|
| + interpolator_->StartInterpolating();
|
| }
|
|
|
| } // namespace media
|
|
|