OLD | NEW |
---|---|
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/renderer_webaudiodevice_impl.h" | 5 #include "content/renderer/media/renderer_webaudiodevice_impl.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 | 8 |
9 #include <string> | 9 #include <string> |
10 | 10 |
11 #include "base/command_line.h" | 11 #include "base/command_line.h" |
12 #include "base/logging.h" | 12 #include "base/logging.h" |
13 #include "base/time/time.h" | 13 #include "base/time/time.h" |
14 #include "build/build_config.h" | 14 #include "build/build_config.h" |
15 #include "content/renderer/media/audio_device_factory.h" | 15 #include "content/renderer/media/audio_device_factory.h" |
16 #include "content/renderer/render_frame_impl.h" | 16 #include "content/renderer/render_frame_impl.h" |
17 #include "content/renderer/render_thread_impl.h" | 17 #include "content/renderer/render_thread_impl.h" |
18 #include "media/base/silent_sink_suspender.h" | 18 #include "media/base/silent_sink_suspender.h" |
19 #include "third_party/WebKit/public/web/WebLocalFrame.h" | 19 #include "third_party/WebKit/public/web/WebLocalFrame.h" |
20 #include "third_party/WebKit/public/web/WebView.h" | 20 #include "third_party/WebKit/public/web/WebView.h" |
21 | 21 |
22 using blink::WebAudioDevice; | 22 using blink::WebAudioDevice; |
| 23 using blink::WebAudioLatencyHint; |
23 using blink::WebLocalFrame; | 24 using blink::WebLocalFrame; |
24 using blink::WebVector; | 25 using blink::WebVector; |
25 using blink::WebView; | 26 using blink::WebView; |
26 | 27 |
27 namespace content { | 28 namespace content { |
28 | 29 |
| 30 namespace { |
| 31 |
| 32 AudioDeviceFactory::SourceType GetLatencyHintSourceType( |
| 33 WebAudioLatencyHint::Category latency_category) { |
| 34 switch (latency_category) { |
| 35 case WebAudioLatencyHint::CategoryInteractive: |
| 36 return AudioDeviceFactory::kSourceWebAudioInteractive; |
| 37 case WebAudioLatencyHint::CategoryBalanced: |
| 38 return AudioDeviceFactory::kSourceWebAudioBalanced; |
| 39 case WebAudioLatencyHint::CategoryPlayback: |
| 40 return AudioDeviceFactory::kSourceWebAudioPlayback; |
| 41 } |
| 42 NOTREACHED(); |
| 43 return AudioDeviceFactory::kSourceWebAudioInteractive; |
| 44 } |
| 45 |
| 46 int FrameIdFromCurrentContext() { |
| 47 // Assumption: This method is being invoked within a V8 call stack. CHECKs |
| 48 // will fail in the call to frameForCurrentContext() otherwise. |
| 49 // |
| 50 // Therefore, we can perform look-ups to determine which RenderView is |
| 51 // starting the audio device. The reason for all this is because the creator |
| 52 // of the WebAudio objects might not be the actual source of the audio (e.g., |
| 53 // an extension creates a object that is passed and used within a page). |
| 54 blink::WebLocalFrame* const web_frame = |
| 55 blink::WebLocalFrame::frameForCurrentContext(); |
| 56 RenderFrame* const render_frame = RenderFrame::FromWebFrame(web_frame); |
| 57 return render_frame ? render_frame->GetRoutingID() : MSG_ROUTING_NONE; |
| 58 } |
| 59 |
| 60 } // namespace |
| 61 |
29 RendererWebAudioDeviceImpl::RendererWebAudioDeviceImpl( | 62 RendererWebAudioDeviceImpl::RendererWebAudioDeviceImpl( |
30 const media::AudioParameters& params, | 63 media::ChannelLayout layout, |
| 64 const blink::WebAudioLatencyHint& latency_hint, |
31 WebAudioDevice::RenderCallback* callback, | 65 WebAudioDevice::RenderCallback* callback, |
32 int session_id, | 66 int session_id, |
33 const url::Origin& security_origin) | 67 const url::Origin& security_origin) |
34 : params_(params), | 68 : latency_hint_(latency_hint), |
35 client_callback_(callback), | 69 client_callback_(callback), |
36 session_id_(session_id), | 70 session_id_(session_id), |
37 security_origin_(security_origin) { | 71 security_origin_(security_origin) { |
38 DCHECK(client_callback_); | 72 DCHECK(client_callback_); |
| 73 |
| 74 media::AudioParameters hardware_params( |
| 75 AudioDeviceFactory::GetOutputDeviceInfo(FrameIdFromCurrentContext(), |
o1ka 2016/12/02 12:02:12: Maybe cache render frame id in a member, instead o…
Andrew MacPherson 2016/12/05 14:12:52: Done. Also added a DCHECK() in the constructor to…
(A sketch of the suggested caching appears after the diff.)
| 76 session_id_, std::string(), |
| 77 security_origin_) |
| 78 .output_params()); |
| 79 |
| 80 int output_buffer_size = 0; |
| 81 |
| 82 media::AudioLatency::LatencyType latency = |
| 83 AudioDeviceFactory::GetSourceLatencyType( |
| 84 GetLatencyHintSourceType(latency_hint_.category())); |
| 85 |
| 86 // Adjust output buffer size according to the latency requirement. |
| 87 switch (latency) { |
| 88 case media::AudioLatency::LATENCY_INTERACTIVE: |
| 89 output_buffer_size = media::AudioLatency::GetInteractiveBufferSize( |
| 90 hardware_params.frames_per_buffer()); |
| 91 break; |
| 92 case media::AudioLatency::LATENCY_RTC: |
| 93 output_buffer_size = media::AudioLatency::GetRtcBufferSize( |
| 94 hardware_params.sample_rate(), hardware_params.frames_per_buffer()); |
| 95 break; |
| 96 case media::AudioLatency::LATENCY_PLAYBACK: |
| 97 output_buffer_size = media::AudioLatency::GetHighLatencyBufferSize( |
| 98 hardware_params.sample_rate(), 0); |
| 99 break; |
| 100 case media::AudioLatency::LATENCY_EXACT_MS: |
| 101 // TODO(olka): add support when WebAudio requires it. |
| 102 default: |
| 103 NOTREACHED(); |
| 104 } |
| 105 |
| 106 DCHECK_NE(output_buffer_size, 0); |
| 107 |
| 108 sink_params_.Reset(media::AudioParameters::AUDIO_PCM_LOW_LATENCY, layout, |
| 109 hardware_params.sample_rate(), 16, output_buffer_size); |
| 110 |
| 111 // Specify the latency info to be passed to the browser side. |
| 112 sink_params_.set_latency_tag(latency); |
39 } | 113 } |
40 | 114 |
41 RendererWebAudioDeviceImpl::~RendererWebAudioDeviceImpl() { | 115 RendererWebAudioDeviceImpl::~RendererWebAudioDeviceImpl() { |
42 DCHECK(!sink_); | 116 DCHECK(!sink_); |
43 } | 117 } |
44 | 118 |
45 void RendererWebAudioDeviceImpl::start() { | 119 void RendererWebAudioDeviceImpl::start() { |
46 DCHECK(thread_checker_.CalledOnValidThread()); | 120 DCHECK(thread_checker_.CalledOnValidThread()); |
47 | 121 |
48 if (sink_) | 122 if (sink_) |
49 return; // Already started. | 123 return; // Already started. |
50 | 124 |
51 // Assumption: This method is being invoked within a V8 call stack. CHECKs | |
52 // will fail in the call to frameForCurrentContext() otherwise. | |
53 // | |
54 // Therefore, we can perform look-ups to determine which RenderView is | |
55 // starting the audio device. The reason for all this is because the creator | |
56 // of the WebAudio objects might not be the actual source of the audio (e.g., | |
57 // an extension creates a object that is passed and used within a page). | |
58 WebLocalFrame* const web_frame = WebLocalFrame::frameForCurrentContext(); | |
59 RenderFrame* const render_frame = | |
60 web_frame ? RenderFrame::FromWebFrame(web_frame) : NULL; | |
61 sink_ = AudioDeviceFactory::NewAudioRendererSink( | 125 sink_ = AudioDeviceFactory::NewAudioRendererSink( |
62 AudioDeviceFactory::kSourceWebAudioInteractive, | 126 GetLatencyHintSourceType(latency_hint_.category()), |
63 render_frame ? render_frame->GetRoutingID() : MSG_ROUTING_NONE, | 127 FrameIdFromCurrentContext(), session_id_, std::string(), |
64 session_id_, std::string(), security_origin_); | 128 security_origin_); |
65 | |
66 // Specify the latency info to be passed to the browser side. | |
67 media::AudioParameters sink_params(params_); | |
68 sink_params.set_latency_tag(AudioDeviceFactory::GetSourceLatencyType( | |
69 AudioDeviceFactory::kSourceWebAudioInteractive)); | |
70 | 129 |
71 #if defined(OS_ANDROID) | 130 #if defined(OS_ANDROID) |
72 // Use the media thread instead of the render thread for fake Render() calls | 131 // Use the media thread instead of the render thread for fake Render() calls |
73 // since it has special connotations for Blink and garbage collection. Timeout | 132 // since it has special connotations for Blink and garbage collection. Timeout |
74 // value chosen to be highly unlikely in the normal case. | 133 // value chosen to be highly unlikely in the normal case. |
75 webaudio_suspender_.reset(new media::SilentSinkSuspender( | 134 webaudio_suspender_.reset(new media::SilentSinkSuspender( |
76 this, base::TimeDelta::FromSeconds(30), sink_params, sink_, | 135 this, base::TimeDelta::FromSeconds(30), sink_params_, sink_, |
77 RenderThreadImpl::current()->GetMediaThreadTaskRunner())); | 136 RenderThreadImpl::current()->GetMediaThreadTaskRunner())); |
78 sink_->Initialize(sink_params, webaudio_suspender_.get()); | 137 sink_->Initialize(sink_params_, webaudio_suspender_.get()); |
79 #else | 138 #else |
80 sink_->Initialize(sink_params, this); | 139 sink_->Initialize(sink_params_, this); |
81 #endif | 140 #endif |
82 | 141 |
83 sink_->Start(); | 142 sink_->Start(); |
84 sink_->Play(); | 143 sink_->Play(); |
85 } | 144 } |
86 | 145 |
87 void RendererWebAudioDeviceImpl::stop() { | 146 void RendererWebAudioDeviceImpl::stop() { |
88 DCHECK(thread_checker_.CalledOnValidThread()); | 147 DCHECK(thread_checker_.CalledOnValidThread()); |
89 if (sink_) { | 148 if (sink_) { |
90 sink_->Stop(); | 149 sink_->Stop(); |
91 sink_ = nullptr; | 150 sink_ = nullptr; |
92 } | 151 } |
93 | 152 |
94 #if defined(OS_ANDROID) | 153 #if defined(OS_ANDROID) |
95 webaudio_suspender_.reset(); | 154 webaudio_suspender_.reset(); |
96 #endif | 155 #endif |
97 } | 156 } |
98 | 157 |
99 double RendererWebAudioDeviceImpl::sampleRate() { | 158 double RendererWebAudioDeviceImpl::sampleRate() { |
100 return params_.sample_rate(); | 159 return sink_params_.sample_rate(); |
| 160 } |
| 161 |
| 162 int RendererWebAudioDeviceImpl::framesPerBuffer() { |
| 163 return sink_params_.frames_per_buffer(); |
101 } | 164 } |
102 | 165 |
103 int RendererWebAudioDeviceImpl::Render(media::AudioBus* dest, | 166 int RendererWebAudioDeviceImpl::Render(media::AudioBus* dest, |
104 uint32_t frames_delayed, | 167 uint32_t frames_delayed, |
105 uint32_t frames_skipped) { | 168 uint32_t frames_skipped) { |
106 // Wrap the output pointers using WebVector. | 169 // Wrap the output pointers using WebVector. |
107 WebVector<float*> web_audio_dest_data(static_cast<size_t>(dest->channels())); | 170 WebVector<float*> web_audio_dest_data(static_cast<size_t>(dest->channels())); |
108 for (int i = 0; i < dest->channels(); ++i) | 171 for (int i = 0; i < dest->channels(); ++i) |
109 web_audio_dest_data[i] = dest->channel(i); | 172 web_audio_dest_data[i] = dest->channel(i); |
110 | 173 |
111 // TODO(xians): Remove the following |web_audio_source_data| after | 174 // TODO(xians): Remove the following |web_audio_source_data| after |
112 // changing the blink interface. | 175 // changing the blink interface. |
113 WebVector<float*> web_audio_source_data(static_cast<size_t>(0)); | 176 WebVector<float*> web_audio_source_data(static_cast<size_t>(0)); |
114 client_callback_->render(web_audio_source_data, web_audio_dest_data, | 177 client_callback_->render(web_audio_source_data, web_audio_dest_data, |
115 dest->frames()); | 178 dest->frames()); |
116 return dest->frames(); | 179 return dest->frames(); |
117 } | 180 } |
118 | 181 |
119 void RendererWebAudioDeviceImpl::OnRenderError() { | 182 void RendererWebAudioDeviceImpl::OnRenderError() { |
120 // TODO(crogers): implement error handling. | 183 // TODO(crogers): implement error handling. |
121 } | 184 } |
122 | 185 |
123 } // namespace content | 186 } // namespace content |
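For context on the inline review exchange above, here is a minimal sketch of what the suggested change might look like: resolve the frame id once in the constructor, cache it in a member, and DCHECK it there. This is a guess, not the actual follow-up patch; the member name `frame_id_` and the elided bodies are assumptions, while the other identifiers already appear in this file.

```cpp
// Hypothetical sketch only -- not the patch under review. Assumes the
// constructor runs inside a V8 call stack, so the frame id can be resolved
// once here and reused later without another lookup.
RendererWebAudioDeviceImpl::RendererWebAudioDeviceImpl(
    media::ChannelLayout layout,
    const blink::WebAudioLatencyHint& latency_hint,
    WebAudioDevice::RenderCallback* callback,
    int session_id,
    const url::Origin& security_origin)
    : latency_hint_(latency_hint),
      client_callback_(callback),
      session_id_(session_id),
      security_origin_(security_origin),
      frame_id_(FrameIdFromCurrentContext()) {  // assumed new member
  DCHECK(client_callback_);
  // The DCHECK mentioned in the reply, as guessed here: fail fast if no
  // render frame could be resolved at construction time.
  DCHECK_NE(frame_id_, MSG_ROUTING_NONE);
  // ... hardware_params / buffer size / sink_params_ setup as in the diff,
  // with FrameIdFromCurrentContext() replaced by frame_id_ ...
}

void RendererWebAudioDeviceImpl::start() {
  DCHECK(thread_checker_.CalledOnValidThread());
  if (sink_)
    return;  // Already started.

  // Reuse the cached id instead of calling FrameIdFromCurrentContext() again,
  // where being inside a V8 call stack is not guaranteed.
  sink_ = AudioDeviceFactory::NewAudioRendererSink(
      GetLatencyHintSourceType(latency_hint_.category()), frame_id_,
      session_id_, std::string(), security_origin_);
  // ... Initialize() / Start() / Play() as in the diff ...
}
```

If it were done this way, the V8-call-stack assumption would only need to hold at construction time, which is where the new hardware_params lookup already requires it.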