Index: content/renderer/media/renderer_webaudiodevice_impl.cc
diff --git a/content/renderer/media/renderer_webaudiodevice_impl.cc b/content/renderer/media/renderer_webaudiodevice_impl.cc
index fc698e4d6a3fb43a79d3e4e844bf4b6344285b4e..bd57422f52bed02d61b9cc09f4f03b842282f633 100644
--- a/content/renderer/media/renderer_webaudiodevice_impl.cc
+++ b/content/renderer/media/renderer_webaudiodevice_impl.cc
@@ -20,22 +20,96 @@
 #include "third_party/WebKit/public/web/WebView.h"
 using blink::WebAudioDevice;
+using blink::WebAudioLatencyHint;
 using blink::WebLocalFrame;
 using blink::WebVector;
 using blink::WebView;
 namespace content {
+namespace {
+
+AudioDeviceFactory::SourceType GetLatencyHintSourceType(
+    WebAudioLatencyHint::Category latency_category) {
+  switch (latency_category) {
+    case WebAudioLatencyHint::CategoryInteractive:
+      return AudioDeviceFactory::kSourceWebAudioInteractive;
+    case WebAudioLatencyHint::CategoryBalanced:
+      return AudioDeviceFactory::kSourceWebAudioBalanced;
+    case WebAudioLatencyHint::CategoryPlayback:
+      return AudioDeviceFactory::kSourceWebAudioPlayback;
+  }
+  NOTREACHED();
+  return AudioDeviceFactory::kSourceWebAudioInteractive;
+}
+
+int FrameIdFromCurrentContext() {
+  // Assumption: This method is being invoked within a V8 call stack. CHECKs
+  // will fail in the call to frameForCurrentContext() otherwise.
+  //
+  // Therefore, we can perform look-ups to determine which RenderView is
+  // starting the audio device. The reason for all this is because the creator
+  // of the WebAudio objects might not be the actual source of the audio (e.g.,
+  // an extension creates an object that is passed and used within a page).
+  blink::WebLocalFrame* const web_frame =
+      blink::WebLocalFrame::frameForCurrentContext();
+  RenderFrame* const render_frame = RenderFrame::FromWebFrame(web_frame);
+  return render_frame ? render_frame->GetRoutingID() : MSG_ROUTING_NONE;
+}
+
+}  // namespace
+
 RendererWebAudioDeviceImpl::RendererWebAudioDeviceImpl(
-    const media::AudioParameters& params,
+    media::ChannelLayout layout,
+    const blink::WebAudioLatencyHint& latency_hint,
     WebAudioDevice::RenderCallback* callback,
     int session_id,
     const url::Origin& security_origin)
-    : params_(params),
+    : latency_hint_(latency_hint),
       client_callback_(callback),
       session_id_(session_id),
       security_origin_(security_origin) {
   DCHECK(client_callback_);
+
+  media::AudioParameters hardware_params(
+      AudioDeviceFactory::GetOutputDeviceInfo(FrameIdFromCurrentContext(),
+                                              session_id_, std::string(),
+                                              security_origin_)
+          .output_params());

[Inline review comment on the GetOutputDeviceInfo() call above]
o1ka, 2016/12/02 12:02:12:
    Maybe cache render frame id in a member, instead o
Andrew MacPherson, 2016/12/05 14:12:52:
    Done. Also added a DCHECK() in the constructor to
+
+  int output_buffer_size = 0;
+
+  media::AudioLatency::LatencyType latency =
+      AudioDeviceFactory::GetSourceLatencyType(
+          GetLatencyHintSourceType(latency_hint_.category()));
+
+  // Adjust output buffer size according to the latency requirement.
+  switch (latency) {
+    case media::AudioLatency::LATENCY_INTERACTIVE:
+      output_buffer_size = media::AudioLatency::GetInteractiveBufferSize(
+          hardware_params.frames_per_buffer());
+      break;
+    case media::AudioLatency::LATENCY_RTC:
+      output_buffer_size = media::AudioLatency::GetRtcBufferSize(
+          hardware_params.sample_rate(), hardware_params.frames_per_buffer());
+      break;
+    case media::AudioLatency::LATENCY_PLAYBACK:
+      output_buffer_size = media::AudioLatency::GetHighLatencyBufferSize(
+          hardware_params.sample_rate(), 0);
+      break;
+    case media::AudioLatency::LATENCY_EXACT_MS:
+    // TODO(olka): add support when WebAudio requires it.
+    default:
+      NOTREACHED();
+  }
+
+  DCHECK_NE(output_buffer_size, 0);
+
+  sink_params_.Reset(media::AudioParameters::AUDIO_PCM_LOW_LATENCY, layout,
+                     hardware_params.sample_rate(), 16, output_buffer_size);
+
+  // Specify the latency info to be passed to the browser side.
+  sink_params_.set_latency_tag(latency);
 }
 RendererWebAudioDeviceImpl::~RendererWebAudioDeviceImpl() {
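(Aside, not part of the patch: a quick standalone illustration of the buffer-size selection the new constructor performs. The helper functions below are simplified stand-ins for media::AudioLatency::GetInteractiveBufferSize(), GetRtcBufferSize(), and GetHighLatencyBufferSize(); the real implementations live in media/base/audio_latency.h and may round or clamp differently, so the printed numbers are only indicative.)

// Standalone sketch: how a latency category turns into an output buffer size.
// The helpers are simplified stand-ins, not the actual media::AudioLatency code.
#include <algorithm>
#include <iostream>

enum class Latency { kInteractive, kRtc, kPlayback };

// Stand-in: interactive audio keeps the hardware buffer size.
int InteractiveBufferSize(int hardware_buffer_size) {
  return hardware_buffer_size;
}

// Stand-in: RTC aims for roughly 10 ms, but never below the hardware buffer.
int RtcBufferSize(int sample_rate, int hardware_buffer_size) {
  return std::max(sample_rate / 100, hardware_buffer_size);
}

// Stand-in: playback trades latency for power; here arbitrarily ~80 ms of frames.
int HighLatencyBufferSize(int sample_rate) {
  return sample_rate * 8 / 100;
}

int SelectOutputBufferSize(Latency latency,
                           int sample_rate,
                           int hardware_buffer_size) {
  switch (latency) {
    case Latency::kInteractive:
      return InteractiveBufferSize(hardware_buffer_size);
    case Latency::kRtc:
      return RtcBufferSize(sample_rate, hardware_buffer_size);
    case Latency::kPlayback:
      return HighLatencyBufferSize(sample_rate);
  }
  return hardware_buffer_size;
}

int main() {
  // Example: a 48 kHz output device with a 480-frame (10 ms) hardware buffer.
  std::cout << SelectOutputBufferSize(Latency::kInteractive, 48000, 480) << "\n"  // 480
            << SelectOutputBufferSize(Latency::kRtc, 48000, 480) << "\n"          // 480
            << SelectOutputBufferSize(Latency::kPlayback, 48000, 480) << "\n";    // 3840
  return 0;
}

With those example inputs, the interactive and RTC categories stay at the hardware buffer size while the playback category opts for a much larger buffer; the chosen latency type is also what gets passed to the browser side via sink_params_.set_latency_tag(latency) above.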
@@ -48,36 +122,21 @@ void RendererWebAudioDeviceImpl::start() {
   if (sink_)
     return;  // Already started.
-  // Assumption: This method is being invoked within a V8 call stack. CHECKs
-  // will fail in the call to frameForCurrentContext() otherwise.
-  //
-  // Therefore, we can perform look-ups to determine which RenderView is
-  // starting the audio device. The reason for all this is because the creator
-  // of the WebAudio objects might not be the actual source of the audio (e.g.,
-  // an extension creates a object that is passed and used within a page).
-  WebLocalFrame* const web_frame = WebLocalFrame::frameForCurrentContext();
-  RenderFrame* const render_frame =
-      web_frame ? RenderFrame::FromWebFrame(web_frame) : NULL;
   sink_ = AudioDeviceFactory::NewAudioRendererSink(
-      AudioDeviceFactory::kSourceWebAudioInteractive,
-      render_frame ? render_frame->GetRoutingID() : MSG_ROUTING_NONE,
-      session_id_, std::string(), security_origin_);
-
-  // Specify the latency info to be passed to the browser side.
-  media::AudioParameters sink_params(params_);
-  sink_params.set_latency_tag(AudioDeviceFactory::GetSourceLatencyType(
-      AudioDeviceFactory::kSourceWebAudioInteractive));
+      GetLatencyHintSourceType(latency_hint_.category()),
+      FrameIdFromCurrentContext(), session_id_, std::string(),
+      security_origin_);
 #if defined(OS_ANDROID)
   // Use the media thread instead of the render thread for fake Render() calls
   // since it has special connotations for Blink and garbage collection. Timeout
   // value chosen to be highly unlikely in the normal case.
   webaudio_suspender_.reset(new media::SilentSinkSuspender(
-      this, base::TimeDelta::FromSeconds(30), sink_params, sink_,
+      this, base::TimeDelta::FromSeconds(30), sink_params_, sink_,
       RenderThreadImpl::current()->GetMediaThreadTaskRunner()));
-  sink_->Initialize(sink_params, webaudio_suspender_.get());
+  sink_->Initialize(sink_params_, webaudio_suspender_.get());
 #else
-  sink_->Initialize(sink_params, this);
+  sink_->Initialize(sink_params_, this);
 #endif
   sink_->Start();
@@ -97,7 +156,11 @@ void RendererWebAudioDeviceImpl::stop() {
 }
 double RendererWebAudioDeviceImpl::sampleRate() {
-  return params_.sample_rate();
+  return sink_params_.sample_rate();
+}
+
+int RendererWebAudioDeviceImpl::framesPerBuffer() {
+  return sink_params_.frames_per_buffer();
 }
 int RendererWebAudioDeviceImpl::Render(media::AudioBus* dest,