OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/renderer_webaudiodevice_impl.h" | 5 #include "content/renderer/media/renderer_webaudiodevice_impl.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 | 8 |
9 #include <string> | 9 #include <string> |
10 | 10 |
| 11 #include "base/bind.h" |
11 #include "base/command_line.h" | 12 #include "base/command_line.h" |
12 #include "base/logging.h" | 13 #include "base/logging.h" |
13 #include "base/time/time.h" | 14 #include "base/time/time.h" |
14 #include "build/build_config.h" | 15 #include "build/build_config.h" |
15 #include "content/renderer/media/audio_device_factory.h" | 16 #include "content/renderer/media/audio_device_factory.h" |
16 #include "content/renderer/render_frame_impl.h" | 17 #include "content/renderer/render_frame_impl.h" |
17 #include "content/renderer/render_thread_impl.h" | 18 #include "content/renderer/render_thread_impl.h" |
18 #include "media/base/audio_timestamp_helper.h" | 19 #include "media/base/audio_timestamp_helper.h" |
19 #include "media/base/silent_sink_suspender.h" | 20 #include "media/base/silent_sink_suspender.h" |
20 #include "third_party/WebKit/public/web/WebLocalFrame.h" | 21 #include "third_party/WebKit/public/web/WebLocalFrame.h" |
21 #include "third_party/WebKit/public/web/WebView.h" | 22 #include "third_party/WebKit/public/web/WebView.h" |
22 | 23 |
23 using blink::WebAudioDevice; | 24 using blink::WebAudioDevice; |
| 25 using blink::WebAudioLatencyHint; |
24 using blink::WebLocalFrame; | 26 using blink::WebLocalFrame; |
25 using blink::WebVector; | 27 using blink::WebVector; |
26 using blink::WebView; | 28 using blink::WebView; |
27 | 29 |
28 namespace content { | 30 namespace content { |
29 | 31 |
30 RendererWebAudioDeviceImpl::RendererWebAudioDeviceImpl( | 32 namespace { |
31 const media::AudioParameters& params, | 33 |
| 34 AudioDeviceFactory::SourceType GetLatencyHintSourceType( |
| 35 WebAudioLatencyHint::Category latency_category) { |
| 36 switch (latency_category) { |
| 37 case WebAudioLatencyHint::CategoryInteractive: |
| 38 return AudioDeviceFactory::kSourceWebAudioInteractive; |
| 39 case WebAudioLatencyHint::CategoryBalanced: |
| 40 return AudioDeviceFactory::kSourceWebAudioBalanced; |
| 41 case WebAudioLatencyHint::CategoryPlayback: |
| 42 return AudioDeviceFactory::kSourceWebAudioPlayback; |
| 43 } |
| 44 NOTREACHED(); |
| 45 return AudioDeviceFactory::kSourceWebAudioInteractive; |
| 46 } |
| 47 |
| 48 int FrameIdFromCurrentContext() { |
| 49 // Assumption: This method is being invoked within a V8 call stack. CHECKs |
| 50 // will fail in the call to frameForCurrentContext() otherwise. |
| 51 // |
| 52 // Therefore, we can perform look-ups to determine which RenderView is |
| 53 // starting the audio device. The reason for all this is because the creator |
| 54 // of the WebAudio objects might not be the actual source of the audio (e.g., |
| 55 // an extension creates an object that is passed and used within a page). |
| 56 blink::WebLocalFrame* const web_frame = |
| 57 blink::WebLocalFrame::frameForCurrentContext(); |
| 58 RenderFrame* const render_frame = RenderFrame::FromWebFrame(web_frame); |
| 59 return render_frame ? render_frame->GetRoutingID() : MSG_ROUTING_NONE; |
| 60 } |
| 61 |
| 62 media::AudioParameters GetOutputDeviceParameters( |
| 63 int frame_id, |
| 64 int session_id, |
| 65 const std::string& device_id, |
| 66 const url::Origin& security_origin) { |
| 67 return AudioDeviceFactory::GetOutputDeviceInfo(frame_id, session_id, |
| 68 device_id, security_origin) |
| 69 .output_params(); |
| 70 } |
| 71 |
| 72 } // namespace |
| 73 |
| 74 RendererWebAudioDeviceImpl* RendererWebAudioDeviceImpl::Create( |
| 75 media::ChannelLayout layout, |
| 76 const blink::WebAudioLatencyHint& latency_hint, |
32 WebAudioDevice::RenderCallback* callback, | 77 WebAudioDevice::RenderCallback* callback, |
33 int session_id, | 78 int session_id, |
34 const url::Origin& security_origin) | 79 const url::Origin& security_origin) { |
35 : params_(params), | 80 return new RendererWebAudioDeviceImpl(layout, latency_hint, callback, |
| 81 session_id, security_origin, |
| 82 base::Bind(&GetOutputDeviceParameters), |
| 83 base::Bind(&FrameIdFromCurrentContext)); |
| 84 } |
| 85 |
| 86 RendererWebAudioDeviceImpl::RendererWebAudioDeviceImpl( |
| 87 media::ChannelLayout layout, |
| 88 const blink::WebAudioLatencyHint& latency_hint, |
| 89 WebAudioDevice::RenderCallback* callback, |
| 90 int session_id, |
| 91 const url::Origin& security_origin, |
| 92 const OutputDeviceParamsCallback& device_params_cb, |
| 93 const RenderFrameIdCallback& render_frame_id_cb) |
| 94 : latency_hint_(latency_hint), |
36 client_callback_(callback), | 95 client_callback_(callback), |
37 session_id_(session_id), | 96 session_id_(session_id), |
38 security_origin_(security_origin) { | 97 security_origin_(security_origin), |
| 98 frame_id_(render_frame_id_cb.Run()) { |
39 DCHECK(client_callback_); | 99 DCHECK(client_callback_); |
| 100 DCHECK_NE(frame_id_, MSG_ROUTING_NONE); |
| 101 |
| 102 media::AudioParameters hardware_params(device_params_cb.Run( |
| 103 frame_id_, session_id_, std::string(), security_origin_)); |
| 104 |
| 105 int output_buffer_size = 0; |
| 106 |
| 107 media::AudioLatency::LatencyType latency = |
| 108 AudioDeviceFactory::GetSourceLatencyType( |
| 109 GetLatencyHintSourceType(latency_hint_.category())); |
| 110 |
| 111 // Adjust output buffer size according to the latency requirement. |
| 112 switch (latency) { |
| 113 case media::AudioLatency::LATENCY_INTERACTIVE: |
| 114 output_buffer_size = media::AudioLatency::GetInteractiveBufferSize( |
| 115 hardware_params.frames_per_buffer()); |
| 116 break; |
| 117 case media::AudioLatency::LATENCY_RTC: |
| 118 output_buffer_size = media::AudioLatency::GetRtcBufferSize( |
| 119 hardware_params.sample_rate(), hardware_params.frames_per_buffer()); |
| 120 break; |
| 121 case media::AudioLatency::LATENCY_PLAYBACK: |
| 122 output_buffer_size = media::AudioLatency::GetHighLatencyBufferSize( |
| 123 hardware_params.sample_rate(), 0); |
| 124 break; |
| 125 case media::AudioLatency::LATENCY_EXACT_MS: |
| 126 // TODO(olka): add support when WebAudio requires it. |
| 127 default: |
| 128 NOTREACHED(); |
| 129 } |
| 130 |
| 131 DCHECK_NE(output_buffer_size, 0); |
| 132 |
| 133 sink_params_.Reset(media::AudioParameters::AUDIO_PCM_LOW_LATENCY, layout, |
| 134 hardware_params.sample_rate(), 16, output_buffer_size); |
| 135 |
| 136 // Specify the latency info to be passed to the browser side. |
| 137 sink_params_.set_latency_tag(latency); |
40 } | 138 } |
41 | 139 |
42 RendererWebAudioDeviceImpl::~RendererWebAudioDeviceImpl() { | 140 RendererWebAudioDeviceImpl::~RendererWebAudioDeviceImpl() { |
43 DCHECK(!sink_); | 141 DCHECK(!sink_); |
44 } | 142 } |
45 | 143 |
46 void RendererWebAudioDeviceImpl::start() { | 144 void RendererWebAudioDeviceImpl::start() { |
47 DCHECK(thread_checker_.CalledOnValidThread()); | 145 DCHECK(thread_checker_.CalledOnValidThread()); |
48 | 146 |
49 if (sink_) | 147 if (sink_) |
50 return; // Already started. | 148 return; // Already started. |
51 | 149 |
52 // Assumption: This method is being invoked within a V8 call stack. CHECKs | |
53 // will fail in the call to frameForCurrentContext() otherwise. | |
54 // | |
55 // Therefore, we can perform look-ups to determine which RenderView is | |
56 // starting the audio device. The reason for all this is because the creator | |
57 // of the WebAudio objects might not be the actual source of the audio (e.g., | |
58 // an extension creates an object that is passed and used within a page). | |
59 WebLocalFrame* const web_frame = WebLocalFrame::frameForCurrentContext(); | |
60 RenderFrame* const render_frame = | |
61 web_frame ? RenderFrame::FromWebFrame(web_frame) : NULL; | |
62 sink_ = AudioDeviceFactory::NewAudioRendererSink( | 150 sink_ = AudioDeviceFactory::NewAudioRendererSink( |
63 AudioDeviceFactory::kSourceWebAudioInteractive, | 151 GetLatencyHintSourceType(latency_hint_.category()), frame_id_, |
64 render_frame ? render_frame->GetRoutingID() : MSG_ROUTING_NONE, | |
65 session_id_, std::string(), security_origin_); | 152 session_id_, std::string(), security_origin_); |
66 | 153 |
67 // Specify the latency info to be passed to the browser side. | |
68 media::AudioParameters sink_params(params_); | |
69 sink_params.set_latency_tag(AudioDeviceFactory::GetSourceLatencyType( | |
70 AudioDeviceFactory::kSourceWebAudioInteractive)); | |
71 | |
72 #if defined(OS_ANDROID) | 154 #if defined(OS_ANDROID) |
73 // Use the media thread instead of the render thread for fake Render() calls | 155 // Use the media thread instead of the render thread for fake Render() calls |
74 // since it has special connotations for Blink and garbage collection. Timeout | 156 // since it has special connotations for Blink and garbage collection. Timeout |
75 // value chosen to be highly unlikely in the normal case. | 157 // value chosen to be highly unlikely in the normal case. |
76 webaudio_suspender_.reset(new media::SilentSinkSuspender( | 158 webaudio_suspender_.reset(new media::SilentSinkSuspender( |
77 this, base::TimeDelta::FromSeconds(30), sink_params, sink_, | 159 this, base::TimeDelta::FromSeconds(30), sink_params_, sink_, |
78 RenderThreadImpl::current()->GetMediaThreadTaskRunner())); | 160 RenderThreadImpl::current()->GetMediaThreadTaskRunner())); |
79 sink_->Initialize(sink_params, webaudio_suspender_.get()); | 161 sink_->Initialize(sink_params_, webaudio_suspender_.get()); |
80 #else | 162 #else |
81 sink_->Initialize(sink_params, this); | 163 sink_->Initialize(sink_params_, this); |
82 #endif | 164 #endif |
83 | 165 |
84 sink_->Start(); | 166 sink_->Start(); |
85 sink_->Play(); | 167 sink_->Play(); |
86 } | 168 } |
87 | 169 |
88 void RendererWebAudioDeviceImpl::stop() { | 170 void RendererWebAudioDeviceImpl::stop() { |
89 DCHECK(thread_checker_.CalledOnValidThread()); | 171 DCHECK(thread_checker_.CalledOnValidThread()); |
90 if (sink_) { | 172 if (sink_) { |
91 sink_->Stop(); | 173 sink_->Stop(); |
92 sink_ = nullptr; | 174 sink_ = nullptr; |
93 } | 175 } |
94 | 176 |
95 #if defined(OS_ANDROID) | 177 #if defined(OS_ANDROID) |
96 webaudio_suspender_.reset(); | 178 webaudio_suspender_.reset(); |
97 #endif | 179 #endif |
98 } | 180 } |
99 | 181 |
100 double RendererWebAudioDeviceImpl::sampleRate() { | 182 double RendererWebAudioDeviceImpl::sampleRate() { |
101 return params_.sample_rate(); | 183 return sink_params_.sample_rate(); |
| 184 } |
| 185 |
| 186 int RendererWebAudioDeviceImpl::framesPerBuffer() { |
| 187 return sink_params_.frames_per_buffer(); |
102 } | 188 } |
103 | 189 |
104 int RendererWebAudioDeviceImpl::Render(base::TimeDelta delay, | 190 int RendererWebAudioDeviceImpl::Render(base::TimeDelta delay, |
105 base::TimeTicks delay_timestamp, | 191 base::TimeTicks delay_timestamp, |
106 int prior_frames_skipped, | 192 int prior_frames_skipped, |
107 media::AudioBus* dest) { | 193 media::AudioBus* dest) { |
108 // Wrap the output pointers using WebVector. | 194 // Wrap the output pointers using WebVector. |
109 WebVector<float*> web_audio_dest_data(static_cast<size_t>(dest->channels())); | 195 WebVector<float*> web_audio_dest_data(static_cast<size_t>(dest->channels())); |
110 for (int i = 0; i < dest->channels(); ++i) | 196 for (int i = 0; i < dest->channels(); ++i) |
111 web_audio_dest_data[i] = dest->channel(i); | 197 web_audio_dest_data[i] = dest->channel(i); |
112 | 198 |
113 // TODO(xians): Remove the following |web_audio_source_data| after | 199 // TODO(xians): Remove the following |web_audio_source_data| after |
114 // changing the blink interface. | 200 // changing the blink interface. |
115 WebVector<float*> web_audio_source_data(static_cast<size_t>(0)); | 201 WebVector<float*> web_audio_source_data(static_cast<size_t>(0)); |
116 client_callback_->render(web_audio_source_data, web_audio_dest_data, | 202 client_callback_->render(web_audio_source_data, web_audio_dest_data, |
117 dest->frames()); | 203 dest->frames()); |
118 return dest->frames(); | 204 return dest->frames(); |
119 } | 205 } |
120 | 206 |
121 void RendererWebAudioDeviceImpl::OnRenderError() { | 207 void RendererWebAudioDeviceImpl::OnRenderError() { |
122 // TODO(crogers): implement error handling. | 208 // TODO(crogers): implement error handling. |
123 } | 209 } |
124 | 210 |
125 } // namespace content | 211 } // namespace content |
OLD | NEW |