Chromium Code Reviews

Unified Diff: content/renderer/media/renderer_webaudiodevice_impl.cc

Issue 2501863003: Support for AudioContextOptions latencyHint. (Closed)
Patch Set: Fix WebAudioDeviceImpl unit test on Android. Created 3 years, 10 months ago
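For context, the latencyHint being plumbed through here is the web-facing AudioContextOptions member from the Web Audio API: a page can pass either a named category or a number of seconds when constructing an AudioContext. A minimal page-side sketch (TypeScript, not part of this CL) of what the renderer change below reacts to:

    // The named categories correspond to the kCategoryInteractive /
    // kCategoryBalanced / kCategoryPlayback values handled in
    // GetLatencyHintSourceType() below.
    const interactiveCtx = new AudioContext({ latencyHint: 'interactive' });
    const playbackCtx = new AudioContext({ latencyHint: 'playback' });

    // A numeric hint (in seconds) maps to the exact category, which this
    // patch still routes to the interactive path (see the TODO in the diff).
    const exactCtx = new AudioContext({ latencyHint: 0.05 });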
Index: content/renderer/media/renderer_webaudiodevice_impl.cc
diff --git a/content/renderer/media/renderer_webaudiodevice_impl.cc b/content/renderer/media/renderer_webaudiodevice_impl.cc
index c9d49bdd07124df75c1ee701aa9da028571bfbd5..6bb147a567673a1f08c622cfe897ff60ad4b2789 100644
--- a/content/renderer/media/renderer_webaudiodevice_impl.cc
+++ b/content/renderer/media/renderer_webaudiodevice_impl.cc
@@ -8,6 +8,7 @@
#include <string>
+#include "base/bind.h"
#include "base/command_line.h"
#include "base/logging.h"
#include "base/time/time.h"
@@ -21,22 +22,122 @@
#include "third_party/WebKit/public/web/WebView.h"
using blink::WebAudioDevice;
+using blink::WebAudioLatencyHint;
using blink::WebLocalFrame;
using blink::WebVector;
using blink::WebView;
namespace content {
+namespace {
+
+AudioDeviceFactory::SourceType GetLatencyHintSourceType(
+ WebAudioLatencyHint::AudioContextLatencyCategory latency_category) {
+ switch (latency_category) {
+ case WebAudioLatencyHint::kCategoryInteractive:
+ return AudioDeviceFactory::kSourceWebAudioInteractive;
+ case WebAudioLatencyHint::kCategoryBalanced:
+ return AudioDeviceFactory::kSourceWebAudioBalanced;
+ case WebAudioLatencyHint::kCategoryPlayback:
+ return AudioDeviceFactory::kSourceWebAudioPlayback;
+ case WebAudioLatencyHint::kCategoryExact:
+ // TODO: Implement kCategoryExact.
+ return AudioDeviceFactory::kSourceWebAudioInteractive;
+ }
+ NOTREACHED();
+ return AudioDeviceFactory::kSourceWebAudioInteractive;
+}
+
+int FrameIdFromCurrentContext() {
+ // Assumption: This method is being invoked within a V8 call stack. CHECKs
+ // will fail in the call to frameForCurrentContext() otherwise.
+ //
+ // Therefore, we can perform look-ups to determine which RenderView is
+ // starting the audio device. The reason for all this is that the creator
+ // of the WebAudio objects might not be the actual source of the audio (e.g.,
+ // an extension creates an object that is passed and used within a page).
+ blink::WebLocalFrame* const web_frame =
+ blink::WebLocalFrame::frameForCurrentContext();
+ RenderFrame* const render_frame = RenderFrame::FromWebFrame(web_frame);
+ return render_frame ? render_frame->GetRoutingID() : MSG_ROUTING_NONE;
+}
+
+media::AudioParameters GetOutputDeviceParameters(
+ int frame_id,
+ int session_id,
+ const std::string& device_id,
+ const url::Origin& security_origin) {
+ return AudioDeviceFactory::GetOutputDeviceInfo(frame_id, session_id,
+ device_id, security_origin)
+ .output_params();
+}
+
+} // namespace
+
+RendererWebAudioDeviceImpl* RendererWebAudioDeviceImpl::Create(
+ media::ChannelLayout layout,
+ const blink::WebAudioLatencyHint& latency_hint,
+ WebAudioDevice::RenderCallback* callback,
+ int session_id,
+ const url::Origin& security_origin) {
+ return new RendererWebAudioDeviceImpl(layout, latency_hint, callback,
+ session_id, security_origin,
+ base::Bind(&GetOutputDeviceParameters),
+ base::Bind(&FrameIdFromCurrentContext));
+}
+
RendererWebAudioDeviceImpl::RendererWebAudioDeviceImpl(
- const media::AudioParameters& params,
+ media::ChannelLayout layout,
+ const blink::WebAudioLatencyHint& latency_hint,
WebAudioDevice::RenderCallback* callback,
int session_id,
- const url::Origin& security_origin)
- : params_(params),
+ const url::Origin& security_origin,
+ const OutputDeviceParamsCallback& device_params_cb,
+ const RenderFrameIdCallback& render_frame_id_cb)
+ : latency_hint_(latency_hint),
client_callback_(callback),
session_id_(session_id),
- security_origin_(security_origin) {
+ security_origin_(security_origin),
+ frame_id_(render_frame_id_cb.Run()) {
DCHECK(client_callback_);
+ DCHECK_NE(frame_id_, MSG_ROUTING_NONE);
+
+ media::AudioParameters hardware_params(device_params_cb.Run(
+ frame_id_, session_id_, std::string(), security_origin_));
+
+ int output_buffer_size = 0;
+
+ media::AudioLatency::LatencyType latency =
+ AudioDeviceFactory::GetSourceLatencyType(
+ GetLatencyHintSourceType(latency_hint_.category()));
+
+ // Adjust output buffer size according to the latency requirement.
+ switch (latency) {
+ case media::AudioLatency::LATENCY_INTERACTIVE:
+ output_buffer_size = media::AudioLatency::GetInteractiveBufferSize(
+ hardware_params.frames_per_buffer());
+ break;
+ case media::AudioLatency::LATENCY_RTC:
+ output_buffer_size = media::AudioLatency::GetRtcBufferSize(
+ hardware_params.sample_rate(), hardware_params.frames_per_buffer());
+ break;
+ case media::AudioLatency::LATENCY_PLAYBACK:
+ output_buffer_size = media::AudioLatency::GetHighLatencyBufferSize(
+ hardware_params.sample_rate(), 0);
+ break;
+ case media::AudioLatency::LATENCY_EXACT_MS:
+ // TODO(olka): add support when WebAudio requires it.
+ default:
+ NOTREACHED();
+ }
+
+ DCHECK_NE(output_buffer_size, 0);
+
+ sink_params_.Reset(media::AudioParameters::AUDIO_PCM_LOW_LATENCY, layout,
+ hardware_params.sample_rate(), 16, output_buffer_size);
+
+ // Specify the latency info to be passed to the browser side.
+ sink_params_.set_latency_tag(latency);
}
RendererWebAudioDeviceImpl::~RendererWebAudioDeviceImpl() {
@@ -49,36 +150,20 @@ void RendererWebAudioDeviceImpl::start() {
if (sink_)
return; // Already started.
- // Assumption: This method is being invoked within a V8 call stack. CHECKs
- // will fail in the call to frameForCurrentContext() otherwise.
- //
- // Therefore, we can perform look-ups to determine which RenderView is
- // starting the audio device. The reason for all this is because the creator
- // of the WebAudio objects might not be the actual source of the audio (e.g.,
- // an extension creates a object that is passed and used within a page).
- WebLocalFrame* const web_frame = WebLocalFrame::frameForCurrentContext();
- RenderFrame* const render_frame =
- web_frame ? RenderFrame::FromWebFrame(web_frame) : NULL;
sink_ = AudioDeviceFactory::NewAudioRendererSink(
- AudioDeviceFactory::kSourceWebAudioInteractive,
- render_frame ? render_frame->GetRoutingID() : MSG_ROUTING_NONE,
+ GetLatencyHintSourceType(latency_hint_.category()), frame_id_,
session_id_, std::string(), security_origin_);
- // Specify the latency info to be passed to the browser side.
- media::AudioParameters sink_params(params_);
- sink_params.set_latency_tag(AudioDeviceFactory::GetSourceLatencyType(
- AudioDeviceFactory::kSourceWebAudioInteractive));
-
#if defined(OS_ANDROID)
// Use the media thread instead of the render thread for fake Render() calls
// since it has special connotations for Blink and garbage collection. Timeout
// value chosen to be highly unlikely in the normal case.
webaudio_suspender_.reset(new media::SilentSinkSuspender(
- this, base::TimeDelta::FromSeconds(30), sink_params, sink_,
- RenderThreadImpl::current()->GetMediaThreadTaskRunner()));
- sink_->Initialize(sink_params, webaudio_suspender_.get());
+ this, base::TimeDelta::FromSeconds(30), sink_params_, sink_,
+ GetMediaTaskRunner()));
+ sink_->Initialize(sink_params_, webaudio_suspender_.get());
#else
- sink_->Initialize(sink_params, this);
+ sink_->Initialize(sink_params_, this);
#endif
sink_->Start();
@@ -98,7 +183,11 @@ void RendererWebAudioDeviceImpl::stop() {
}
double RendererWebAudioDeviceImpl::sampleRate() {
- return params_.sample_rate();
+ return sink_params_.sample_rate();
+}
+
+int RendererWebAudioDeviceImpl::framesPerBuffer() {
+ return sink_params_.frames_per_buffer();
}
int RendererWebAudioDeviceImpl::Render(base::TimeDelta delay,
@@ -112,8 +201,8 @@ int RendererWebAudioDeviceImpl::Render(base::TimeDelta delay,
if (!delay.is_zero()) { // Zero values are sent at the first call.
// Subtract the bus duration to get hardware delay.
- delay -= media::AudioTimestampHelper::FramesToTime(dest->frames(),
- params_.sample_rate());
+ delay -=
+ media::AudioTimestampHelper::FramesToTime(dest->frames(), sampleRate());
}
DCHECK_GE(delay, base::TimeDelta());
@@ -128,4 +217,18 @@ void RendererWebAudioDeviceImpl::OnRenderError() {
// TODO(crogers): implement error handling.
}
+void RendererWebAudioDeviceImpl::SetMediaTaskRunnerForTesting(
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner) {
+ media_task_runner_ = media_task_runner;
+}
+
+const scoped_refptr<base::SingleThreadTaskRunner>&
+RendererWebAudioDeviceImpl::GetMediaTaskRunner() {
+ if (!media_task_runner_) {
+ media_task_runner_ =
+ RenderThreadImpl::current()->GetMediaThreadTaskRunner();
+ }
+ return media_task_runner_;
+}
+
} // namespace content
