Index: media/base/android/media_source_player.cc
diff --git a/media/base/android/media_source_player.cc b/media/base/android/media_source_player.cc
index 8b19de7f8932fca8807f95ef7218b684623588ab..e52d0e49fc2e651e0d6352b391a96432fc8cb6bc 100644
--- a/media/base/android/media_source_player.cc
+++ b/media/base/android/media_source_player.cc
@@ -19,7 +19,15 @@
 #include "media/base/android/media_drm_bridge.h"
 #include "media/base/android/media_player_manager.h"
 #include "media/base/android/video_decoder_job.h"
+#include "media/base/audio_timestamp_helper.h"
 #include "media/base/buffers.h"
+
+namespace {
+
+// Use 16bit PCM for audio output. Keep this value in sync with the output
+// format we passed to AudioTrack in MediaCodecBridge.
+const int kBytesPerAudioOutputSample = 2;
+}

 namespace media {

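Note: kBytesPerAudioOutputSample encodes the assumption that decoded audio is
written out as 16-bit PCM, so every sample occupies two bytes. As a rough
standalone illustration (not part of the patch; the function name and
parameters are hypothetical), a decoded output byte count maps to sample
frames like this:

#include <cstddef>

// One frame holds one 16-bit (2-byte) sample per channel, so the frame count
// is the byte count divided by (2 * channels).
size_t AudioBytesToFrames(size_t audio_output_bytes, int num_channels) {
  return audio_output_bytes / (2 * static_cast<size_t>(num_channels));
}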
@@ -109,6 +117,8 @@
   pending_seek_ = false;

   clock_.SetTime(seek_time, seek_time);
+  if (audio_timestamp_helper_)
+    audio_timestamp_helper_->SetBaseTimestamp(seek_time);

   if (audio_decoder_job_ && audio_decoder_job_->is_decoding())
     audio_decoder_job_->StopDecode();
@@ -305,6 +315,14 @@
   sampling_rate_ = configs.audio_sampling_rate;
   is_audio_encrypted_ = configs.is_audio_encrypted;
   audio_extra_data_ = configs.audio_extra_data;
+  if (HasAudio()) {
+    DCHECK_GT(num_channels_, 0);
+    audio_timestamp_helper_.reset(new AudioTimestampHelper(sampling_rate_));
+    audio_timestamp_helper_->SetBaseTimestamp(GetCurrentTime());
+  } else {
+    audio_timestamp_helper_.reset();
+  }
+
   video_codec_ = configs.video_codec;
   width_ = configs.video_size.width();
   height_ = configs.video_size.height();
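Note: the helper is rebuilt whenever the audio configs change and is anchored
at the current playback position; streams without audio drop it entirely. A
minimal standalone sketch of the AudioTimestampHelper calls this patch relies
on (sample rate and frame count are made-up values):

#include "base/time/time.h"
#include "media/base/audio_timestamp_helper.h"

void AudioTimestampHelperSketch() {
  // Construct with the stream's sampling rate, then anchor the helper at the
  // media time playback (or a seek) starts from.
  media::AudioTimestampHelper helper(44100);
  helper.SetBaseTimestamp(base::TimeDelta());

  // Each batch of rendered frames advances the derived timestamp; 1024 frames
  // at 44100 Hz is roughly 23.2 ms.
  helper.AddFrames(1024);
  base::TimeDelta rendered_end_time = helper.GetTimestamp();
  (void)rendered_end_time;
}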
@@ -426,8 +444,8 @@
     DVLOG(1) << __FUNCTION__ << " : setting clock to actual browser seek time: "
              << seek_time.InSecondsF();
     clock_.SetTime(seek_time, seek_time);
-    if (audio_decoder_job_)
-      audio_decoder_job_->SetBaseTimestamp(seek_time);
+    if (audio_timestamp_helper_)
+      audio_timestamp_helper_->SetBaseTimestamp(seek_time);
   } else {
     DCHECK(actual_browser_seek_time == kNoTimestamp());
   }
@@ -453,10 +471,16 @@
 }

 void MediaSourcePlayer::UpdateTimestamps(
-    base::TimeDelta current_presentation_timestamp,
-    base::TimeDelta max_presentation_timestamp) {
-  clock_.SetTime(current_presentation_timestamp, max_presentation_timestamp);
-
+    base::TimeDelta presentation_timestamp, size_t audio_output_bytes) {
+  base::TimeDelta new_max_time = presentation_timestamp;
+
+  if (audio_output_bytes > 0) {
+    audio_timestamp_helper_->AddFrames(
+        audio_output_bytes / (kBytesPerAudioOutputSample * num_channels_));
+    new_max_time = audio_timestamp_helper_->GetTimestamp();
+  }
+
+  clock_.SetMaxTime(new_max_time);
   manager()->OnTimeUpdate(player_id(), GetCurrentTime());
 }

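Note: the clock's max time is now driven by audio actually written to
AudioTrack rather than by decoder presentation timestamps. For example, 8192
output bytes of stereo 16-bit PCM is 8192 / (2 * 2) = 2048 frames, which at a
48000 Hz sampling rate advances |audio_timestamp_helper_| by roughly 42.7 ms;
when no audio bytes were produced, the decoder's presentation timestamp is
used as before.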
@@ -486,8 +510,6 @@
   if (IsEventPending(SEEK_EVENT_PENDING)) {
     DVLOG(1) << __FUNCTION__ << " : Handling SEEK_EVENT";
     ClearDecodingData();
-    if (audio_decoder_job_)
-      audio_decoder_job_->SetBaseTimestamp(GetCurrentTime());
     demuxer_->RequestDemuxerSeek(GetCurrentTime(), doing_browser_seek_);
     return;
   }
@@ -559,8 +581,7 @@

 void MediaSourcePlayer::MediaDecoderCallback(
     bool is_audio, MediaCodecStatus status,
-    base::TimeDelta current_presentation_timestamp,
-    base::TimeDelta max_presentation_timestamp) {
+    base::TimeDelta presentation_timestamp, size_t audio_output_bytes) {
   DVLOG(1) << __FUNCTION__ << ": " << is_audio << ", " << status;

   // TODO(xhwang): Drop IntToString() when http://crbug.com/303899 is fixed.
@@ -604,12 +625,6 @@
     return;
   }

-  if (status == MEDIA_CODEC_OK && is_clock_manager &&
-      current_presentation_timestamp != kNoTimestamp()) {
-    UpdateTimestamps(
-        current_presentation_timestamp, max_presentation_timestamp);
-  }
-
   if (status == MEDIA_CODEC_OUTPUT_END_OF_STREAM)
     PlaybackCompleted(is_audio);

@@ -620,6 +635,11 @@

   if (status == MEDIA_CODEC_OUTPUT_END_OF_STREAM)
     return;
+
+  if (status == MEDIA_CODEC_OK && is_clock_manager &&
+      presentation_timestamp != kNoTimestamp()) {
+    UpdateTimestamps(presentation_timestamp, audio_output_bytes);
+  }

   if (!playing_) {
     if (is_clock_manager)
@@ -642,9 +662,8 @@
   // If we have a valid timestamp, start the starvation callback. Otherwise,
   // reset the |start_time_ticks_| so that the next frame will not suffer
   // from the decoding delay caused by the current frame.
-  if (current_presentation_timestamp != kNoTimestamp())
-    StartStarvationCallback(current_presentation_timestamp,
-                            max_presentation_timestamp);
+  if (presentation_timestamp != kNoTimestamp())
+    StartStarvationCallback(presentation_timestamp);
   else
     start_time_ticks_ = base::TimeTicks::Now();
 }
@@ -792,13 +811,6 @@

   if (audio_decoder_job_) {
     SetVolumeInternal();
-    // Need to reset the base timestamp in |audio_decoder_job_|.
-    // TODO(qinmin): When reconfiguring the |audio_decoder_job_|, there might
-    // still be some audio frames in the decoder or in AudioTrack. Therefore,
-    // we are losing some time here. http://crbug.com/357726.
-    base::TimeDelta current_time = GetCurrentTime();
-    audio_decoder_job_->SetBaseTimestamp(current_time);
-    clock_.SetTime(current_time, current_time);
     audio_decoder_job_->BeginPrerolling(preroll_timestamp_);
     reconfig_audio_decoder_ = false;
   }
@@ -900,8 +912,7 @@
 }

 void MediaSourcePlayer::StartStarvationCallback(
-    base::TimeDelta current_presentation_timestamp,
-    base::TimeDelta max_presentation_timestamp) {
+    base::TimeDelta presentation_timestamp) {
   // 20ms was chosen because it is the typical size of a compressed audio frame.
   // Anything smaller than this would likely cause unnecessary cycling in and
   // out of the prefetch state.
@@ -911,16 +922,16 @@
   base::TimeDelta current_timestamp = GetCurrentTime();
   base::TimeDelta timeout;
   if (HasAudio()) {
-    timeout = max_presentation_timestamp - current_timestamp;
+    timeout = audio_timestamp_helper_->GetTimestamp() - current_timestamp;
   } else {
-    DCHECK(current_timestamp <= current_presentation_timestamp);
+    DCHECK(current_timestamp <= presentation_timestamp);

     // For video only streams, fps can be estimated from the difference
     // between the previous and current presentation timestamps. The
     // previous presentation timestamp is equal to current_timestamp.
     // TODO(qinmin): determine whether 2 is a good coefficient for estimating
     // video frame timeout.
-    timeout = 2 * (current_presentation_timestamp - current_timestamp);
+    timeout = 2 * (presentation_timestamp - current_timestamp);
   }

   timeout = std::max(timeout, kMinStarvationTimeout);
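Note: after this change the audio starvation timeout is derived from the
rendered-audio timestamp kept by |audio_timestamp_helper_| instead of the
decoder's max presentation timestamp. A condensed standalone restatement of
the selection logic (not the actual player code; the function and parameter
names are made up, and the 20 ms floor comes from the comment in the hunk
above):

#include <algorithm>
#include "base/time/time.h"

base::TimeDelta ComputeStarvationTimeout(bool has_audio,
                                         base::TimeDelta rendered_audio_time,
                                         base::TimeDelta presentation_timestamp,
                                         base::TimeDelta current_timestamp) {
  const base::TimeDelta kMinStarvationTimeout =
      base::TimeDelta::FromMilliseconds(20);
  base::TimeDelta timeout;
  if (has_audio) {
    // Audio drives the clock: wait until the already-rendered audio runs out.
    timeout = rendered_audio_time - current_timestamp;
  } else {
    // Video only: roughly two frame durations, estimated from consecutive
    // presentation timestamps.
    timeout = 2 * (presentation_timestamp - current_timestamp);
  }
  return std::max(timeout, kMinStarvationTimeout);
}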