Index: media/base/android/audio_decoder_job.cc
diff --git a/media/base/android/audio_decoder_job.cc b/media/base/android/audio_decoder_job.cc
index d089796ccc65cc1de197aa75b2b1f25a58b7ca7e..bb64c0f71106a66a619d2c673e7b5c7634ad9524 100644
--- a/media/base/android/audio_decoder_job.cc
+++ b/media/base/android/audio_decoder_job.cc
@@ -8,6 +8,14 @@
#include "base/lazy_instance.h"
#include "base/threading/thread.h"
#include "media/base/android/media_codec_bridge.h"
+#include "media/base/audio_timestamp_helper.h"
+
+namespace {
+
+// Use 16bit PCM for audio output. Keep this value in sync with the output
+// format we passed to AudioTrack in MediaCodecBridge.
+const int kBytesPerAudioOutputSample = 2;
+}
namespace media {
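For context, kBytesPerAudioOutputSample is the size of one sample for one channel; the per-frame size passed to the constructor in the next hunk also multiplies in the channel count. A minimal standalone sketch of that arithmetic (the values and names here are illustrative, not taken from the patch):

#include <cstddef>
#include <cstdio>

int main() {
  // 16-bit PCM: two bytes per sample per channel (mirrors the constant above).
  const int kBytesPerAudioOutputSample = 2;
  const int channel_count = 2;  // Example: stereo.
  const int bytes_per_frame = kBytesPerAudioOutputSample * channel_count;

  // A decoded output buffer of |size| bytes then holds size / bytes_per_frame
  // audio frames, which is what AddFrames() is fed in the last hunk.
  const std::size_t size = 4096;  // Example buffer size in bytes.
  std::printf("bytes_per_frame=%d, frames_in_buffer=%zu\n",
              bytes_per_frame, size / bytes_per_frame);
  return 0;
}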
@@ -35,19 +43,26 @@ AudioDecoderJob* AudioDecoderJob::Create(
  scoped_ptr<AudioCodecBridge> codec(AudioCodecBridge::Create(audio_codec));
  if (codec && codec->Start(audio_codec, sample_rate, channel_count, extra_data,
                            extra_data_size, true, media_crypto)) {
-    return new AudioDecoderJob(codec.Pass(), request_data_cb);
+    scoped_ptr<AudioTimestampHelper> audio_timestamp_helper(
+        new AudioTimestampHelper(sample_rate));
+    return new AudioDecoderJob(
+        audio_timestamp_helper.Pass(), codec.Pass(),
+        kBytesPerAudioOutputSample * channel_count, request_data_cb);
  }
-
  LOG(ERROR) << "Failed to create AudioDecoderJob.";
  return NULL;
}
AudioDecoderJob::AudioDecoderJob(
+    scoped_ptr<AudioTimestampHelper> audio_timestamp_helper,
    scoped_ptr<AudioCodecBridge> audio_codec_bridge,
+    int bytes_per_frame,
    const base::Closure& request_data_cb)
    : MediaDecoderJob(g_audio_decoder_thread.Pointer()->message_loop_proxy(),
                      audio_codec_bridge.get(), request_data_cb),
-      audio_codec_bridge_(audio_codec_bridge.Pass()) {
+      bytes_per_frame_(bytes_per_frame),
+      audio_codec_bridge_(audio_codec_bridge.Pass()),
+      audio_timestamp_helper_(audio_timestamp_helper.Pass()) {
}
AudioDecoderJob::~AudioDecoderJob() {
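The helper constructed above with the sample rate is what converts decoded frame counts into presentation time, once it has been seeded through the new SetBaseTimestamp() below. As a rough mental model only, a simplified stand-in using microseconds instead of base::TimeDelta (the real class is media/base/audio_timestamp_helper.h; this is not its implementation):

#include <cstdint>

// Simplified stand-in for media::AudioTimestampHelper. It tracks how many
// frames have been queued since the base timestamp and converts frame
// counts to durations at the configured sample rate.
class TinyAudioTimestampHelper {
 public:
  explicit TinyAudioTimestampHelper(int samples_per_second)
      : samples_per_second_(samples_per_second),
        base_timestamp_us_(0),
        frame_count_(0) {}

  void SetBaseTimestamp(int64_t base_timestamp_us) {
    base_timestamp_us_ = base_timestamp_us;
    frame_count_ = 0;
  }

  void AddFrames(int64_t frames) { frame_count_ += frames; }
  int64_t frame_count() const { return frame_count_; }

  // Duration of |frames| frames at the configured sample rate.
  int64_t GetFrameDurationUs(int64_t frames) const {
    return frames * 1000000 / samples_per_second_;
  }

  // Timestamp just past the last frame added so far.
  int64_t GetTimestampUs() const {
    return base_timestamp_us_ + GetFrameDurationUs(frame_count_);
  }

 private:
  const int samples_per_second_;
  int64_t base_timestamp_us_;
  int64_t frame_count_;
};

SetBaseTimestamp() here corresponds to the AudioDecoderJob::SetBaseTimestamp() added in the next hunk.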
@@ -57,17 +72,32 @@ void AudioDecoderJob::SetVolume(double volume) {
  audio_codec_bridge_->SetVolume(volume);
}
+void AudioDecoderJob::SetBaseTimestamp(base::TimeDelta base_timestamp) {
+  audio_timestamp_helper_->SetBaseTimestamp(base_timestamp);
+}
+
void AudioDecoderJob::ReleaseOutputBuffer(
    int output_buffer_index,
    size_t size,
    bool render_output,
+    base::TimeDelta current_presentation_timestamp,
    const ReleaseOutputCompletionCallback& callback) {
  size_t size_to_render = render_output ? size : 0u;
-  if (size_to_render)
-    audio_codec_bridge_->PlayOutputBuffer(output_buffer_index, size_to_render);
+  current_presentation_timestamp = kNoTimestamp();
acolwell GONE FROM CHROMIUM
2014/04/14 16:00:47
nit: Move this to an else so it doesn't get set twice.
qinmin
2014/04/14 19:01:58
Done.
+  if (size_to_render) {
acolwell GONE FROM CHROMIUM
2014/04/14 16:00:47
nit: s/size_to_render/render_output/ here since si
qinmin
2014/04/14 19:01:58
0u is also a case we need to bypass due to an android bug.
+    int64 head_position = audio_codec_bridge_->PlayOutputBuffer(
+        output_buffer_index, size_to_render);
acolwell GONE FROM CHROMIUM
2014/04/14 16:00:47
nit: s/size_to_render/size/ ?
qinmin
2014/04/14 19:01:58
Done.
+    audio_timestamp_helper_->AddFrames(size / (bytes_per_frame_));
+    int64 frames_to_play =
+        audio_timestamp_helper_->frame_count() - head_position;
+    DCHECK_GE(frames_to_play, 0);
+    current_presentation_timestamp =
+        audio_timestamp_helper_->GetTimestamp() -
+        audio_timestamp_helper_->GetFrameDuration(frames_to_play);
+  }
  audio_codec_bridge_->ReleaseOutputBuffer(output_buffer_index, false);
-
-  callback.Run(size_to_render);
+  callback.Run(current_presentation_timestamp,
+               audio_timestamp_helper_->GetTimestamp());
}
bool AudioDecoderJob::ComputeTimeToRender() const { |
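Putting the ReleaseOutputBuffer() hunk together: the current presentation timestamp is the end-of-queue timestamp minus the duration of the frames AudioTrack has not played yet. A rough sketch of that computation, reusing the TinyAudioTimestampHelper stand-in from the earlier sketch; |head_position| plays the role of the value PlayOutputBuffer() returns in the patch:

#include <cassert>
#include <cstddef>
#include <cstdint>

// Sketch only: mirrors the arithmetic in the ReleaseOutputBuffer() hunk.
int64_t ComputeCurrentPresentationTimestampUs(
    TinyAudioTimestampHelper* helper,
    std::size_t size,            // Bytes just queued for playback.
    int bytes_per_frame,
    int64_t head_position) {     // Frames AudioTrack has already played.
  // Account for the frames that were just handed to AudioTrack.
  helper->AddFrames(static_cast<int64_t>(size / bytes_per_frame));

  // Frames queued so far that AudioTrack has not played yet.
  const int64_t frames_to_play = helper->frame_count() - head_position;
  assert(frames_to_play >= 0);

  // What is audible "now": end-of-queue time minus the unplayed backlog.
  return helper->GetTimestampUs() -
         helper->GetFrameDurationUs(frames_to_play);
}

The second value passed to callback.Run(), GetTimestamp(), is then the timestamp that will have been reached once everything queued so far has played out.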