Index: content/renderer/media/renderer_webaudiodevice_impl.cc
diff --git a/content/renderer/media/renderer_webaudiodevice_impl.cc b/content/renderer/media/renderer_webaudiodevice_impl.cc
index a9fba3b4e21ec8f451330dc4d22977766a1eed8c..1237d14df507e29752429098e531b95bf31c9d16 100644
--- a/content/renderer/media/renderer_webaudiodevice_impl.cc
+++ b/content/renderer/media/renderer_webaudiodevice_impl.cc
@@ -22,6 +22,7 @@
 #include "third_party/WebKit/public/web/WebView.h"
 using blink::WebAudioDevice;
+using blink::WebAudioTimestamp;
 using blink::WebLocalFrame;
 using blink::WebVector;
 using blink::WebView;
@@ -102,7 +103,8 @@ double RendererWebAudioDeviceImpl::sampleRate() {
 int RendererWebAudioDeviceImpl::Render(media::AudioBus* dest,
                                        uint32_t frames_delayed,
-                                       uint32_t frames_skipped) {
+                                       uint32_t frames_skipped,
+                                       const media::AudioTimestamp& timestamp) {
 #if defined(OS_ANDROID)
   if (is_first_buffer_after_silence_) {
     DCHECK(!is_using_null_audio_sink_);
@@ -120,9 +122,15 @@ int RendererWebAudioDeviceImpl::Render(media::AudioBus* dest,
   // TODO(xians): Remove the following |web_audio_source_data| after
   // changing the blink interface.
   WebVector<float*> web_audio_source_data(static_cast<size_t>(0));
+
+  double seconds = timestamp.ticks
+      / static_cast<double>(base::Time::kMicrosecondsPerSecond);
+  WebAudioTimestamp web_audio_timestamp(static_cast<size_t>(timestamp.frames),
+                                        seconds);
   client_callback_->render(web_audio_source_data,
                            web_audio_dest_data,
-                           dest->frames());
+                           dest->frames(),
+                           web_audio_timestamp);
 #if defined(OS_ANDROID)
   const bool is_zero = dest->AreFramesZero();