Chromium Code Reviews

Side by Side Diff: third_party/WebKit/Source/modules/webaudio/BaseAudioContext.cpp

Issue 1865583002: Implement BaseAudioContext (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 8 months ago
1 /* 1 /*
2 * Copyright (C) 2010, Google Inc. All rights reserved. 2 * Copyright (C) 2010, Google Inc. All rights reserved.
3 * 3 *
4 * Redistribution and use in source and binary forms, with or without 4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions 5 * modification, are permitted provided that the following conditions
6 * are met: 6 * are met:
7 * 1. Redistributions of source code must retain the above copyright 7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer. 8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright 9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the 10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution. 11 * documentation and/or other materials provided with the distribution.
12 * 12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY 13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
15 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 15 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
16 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY 16 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
17 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 17 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
18 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 18 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
19 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 19 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
20 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 20 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
21 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 21 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
22 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 22 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
23 */ 23 */
24 24
25 #include "modules/webaudio/AbstractAudioContext.h" 25 #include "modules/webaudio/BaseAudioContext.h"
26
26 #include "bindings/core/v8/Dictionary.h" 27 #include "bindings/core/v8/Dictionary.h"
27 #include "bindings/core/v8/ExceptionMessages.h" 28 #include "bindings/core/v8/ExceptionMessages.h"
28 #include "bindings/core/v8/ExceptionState.h" 29 #include "bindings/core/v8/ExceptionState.h"
29 #include "bindings/core/v8/ScriptPromiseResolver.h" 30 #include "bindings/core/v8/ScriptPromiseResolver.h"
30 #include "bindings/core/v8/ScriptState.h" 31 #include "bindings/core/v8/ScriptState.h"
31 #include "core/dom/DOMException.h" 32 #include "core/dom/DOMException.h"
32 #include "core/dom/Document.h" 33 #include "core/dom/Document.h"
33 #include "core/dom/ExceptionCode.h" 34 #include "core/dom/ExceptionCode.h"
34 #include "core/dom/ExecutionContextTask.h" 35 #include "core/dom/ExecutionContextTask.h"
35 #include "core/html/HTMLMediaElement.h" 36 #include "core/html/HTMLMediaElement.h"
(...skipping 27 matching lines...)
63 #include "modules/webaudio/ScriptProcessorNode.h" 64 #include "modules/webaudio/ScriptProcessorNode.h"
64 #include "modules/webaudio/StereoPannerNode.h" 65 #include "modules/webaudio/StereoPannerNode.h"
65 #include "modules/webaudio/WaveShaperNode.h" 66 #include "modules/webaudio/WaveShaperNode.h"
66 #include "platform/ThreadSafeFunctional.h" 67 #include "platform/ThreadSafeFunctional.h"
67 #include "platform/audio/IIRFilter.h" 68 #include "platform/audio/IIRFilter.h"
68 #include "public/platform/Platform.h" 69 #include "public/platform/Platform.h"
69 #include "wtf/text/WTFString.h" 70 #include "wtf/text/WTFString.h"
70 71
71 namespace blink { 72 namespace blink {
72 73
73 AbstractAudioContext* AbstractAudioContext::create(Document& document, ExceptionState& exceptionState) 74 BaseAudioContext* BaseAudioContext::create(Document& document, ExceptionState& exceptionState)
74 { 75 {
75 return AudioContext::create(document, exceptionState); 76 return AudioContext::create(document, exceptionState);
76 } 77 }
77 78
78 // FIXME(dominicc): Devolve these constructors to AudioContext 79 // FIXME(dominicc): Devolve these constructors to AudioContext
79 // and OfflineAudioContext respectively. 80 // and OfflineAudioContext respectively.
80 81
81 // Constructor for rendering to the audio hardware. 82 // Constructor for rendering to the audio hardware.
82 AbstractAudioContext::AbstractAudioContext(Document* document) 83 BaseAudioContext::BaseAudioContext(Document* document)
83 : ActiveScriptWrappable(this) 84 : ActiveScriptWrappable(this)
84 , ActiveDOMObject(document) 85 , ActiveDOMObject(document)
85 , m_destinationNode(nullptr) 86 , m_destinationNode(nullptr)
86 , m_isCleared(false) 87 , m_isCleared(false)
87 , m_isResolvingResumePromises(false) 88 , m_isResolvingResumePromises(false)
88 , m_connectionCount(0) 89 , m_connectionCount(0)
89 , m_didInitializeContextGraphMutex(false) 90 , m_didInitializeContextGraphMutex(false)
90 , m_deferredTaskHandler(DeferredTaskHandler::create()) 91 , m_deferredTaskHandler(DeferredTaskHandler::create())
91 , m_contextState(Suspended) 92 , m_contextState(Suspended)
92 , m_closedContextSampleRate(-1) 93 , m_closedContextSampleRate(-1)
93 , m_periodicWaveSine(nullptr) 94 , m_periodicWaveSine(nullptr)
94 , m_periodicWaveSquare(nullptr) 95 , m_periodicWaveSquare(nullptr)
95 , m_periodicWaveSawtooth(nullptr) 96 , m_periodicWaveSawtooth(nullptr)
96 , m_periodicWaveTriangle(nullptr) 97 , m_periodicWaveTriangle(nullptr)
97 { 98 {
98 m_didInitializeContextGraphMutex = true; 99 m_didInitializeContextGraphMutex = true;
99 m_destinationNode = DefaultAudioDestinationNode::create(this); 100 m_destinationNode = DefaultAudioDestinationNode::create(this);
100 101
101 initialize(); 102 initialize();
102 } 103 }
103 104
104 // Constructor for offline (non-realtime) rendering. 105 // Constructor for offline (non-realtime) rendering.
105 AbstractAudioContext::AbstractAudioContext(Document* document, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate) 106 BaseAudioContext::BaseAudioContext(Document* document, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate)
106 : ActiveScriptWrappable(this) 107 : ActiveScriptWrappable(this)
107 , ActiveDOMObject(document) 108 , ActiveDOMObject(document)
108 , m_destinationNode(nullptr) 109 , m_destinationNode(nullptr)
109 , m_isCleared(false) 110 , m_isCleared(false)
110 , m_isResolvingResumePromises(false) 111 , m_isResolvingResumePromises(false)
111 , m_connectionCount(0) 112 , m_connectionCount(0)
112 , m_didInitializeContextGraphMutex(false) 113 , m_didInitializeContextGraphMutex(false)
113 , m_deferredTaskHandler(DeferredTaskHandler::create()) 114 , m_deferredTaskHandler(DeferredTaskHandler::create())
114 , m_contextState(Suspended) 115 , m_contextState(Suspended)
115 , m_closedContextSampleRate(-1) 116 , m_closedContextSampleRate(-1)
116 , m_periodicWaveSine(nullptr) 117 , m_periodicWaveSine(nullptr)
117 , m_periodicWaveSquare(nullptr) 118 , m_periodicWaveSquare(nullptr)
118 , m_periodicWaveSawtooth(nullptr) 119 , m_periodicWaveSawtooth(nullptr)
119 , m_periodicWaveTriangle(nullptr) 120 , m_periodicWaveTriangle(nullptr)
120 { 121 {
121 m_didInitializeContextGraphMutex = true; 122 m_didInitializeContextGraphMutex = true;
122 } 123 }
123 124
124 AbstractAudioContext::~AbstractAudioContext() 125 BaseAudioContext::~BaseAudioContext()
125 { 126 {
126 deferredTaskHandler().contextWillBeDestroyed(); 127 deferredTaskHandler().contextWillBeDestroyed();
127 // AudioNodes keep a reference to their context, so there should be no way to be in the destructor if there are still AudioNodes around. 128 // AudioNodes keep a reference to their context, so there should be no way to be in the destructor if there are still AudioNodes around.
128 ASSERT(!isDestinationInitialized()); 129 ASSERT(!isDestinationInitialized());
129 ASSERT(!m_activeSourceNodes.size()); 130 ASSERT(!m_activeSourceNodes.size());
130 ASSERT(!m_finishedSourceHandlers.size()); 131 ASSERT(!m_finishedSourceHandlers.size());
131 ASSERT(!m_isResolvingResumePromises); 132 ASSERT(!m_isResolvingResumePromises);
132 ASSERT(!m_resumeResolvers.size()); 133 ASSERT(!m_resumeResolvers.size());
133 } 134 }
134 135
135 void AbstractAudioContext::initialize() 136 void BaseAudioContext::initialize()
136 { 137 {
137 if (isDestinationInitialized()) 138 if (isDestinationInitialized())
138 return; 139 return;
139 140
140 FFTFrame::initialize(); 141 FFTFrame::initialize();
141 m_listener = AudioListener::create(); 142 m_listener = AudioListener::create();
142 143
143 if (m_destinationNode.get()) { 144 if (m_destinationNode.get()) {
144 m_destinationNode->handler().initialize(); 145 m_destinationNode->handler().initialize();
145 } 146 }
146 } 147 }
147 148
148 void AbstractAudioContext::clear() 149 void BaseAudioContext::clear()
149 { 150 {
150 m_destinationNode.clear(); 151 m_destinationNode.clear();
151 // The audio rendering thread is dead. Nobody will schedule AudioHandler 152 // The audio rendering thread is dead. Nobody will schedule AudioHandler
152 // deletion. Let's do it ourselves. 153 // deletion. Let's do it ourselves.
153 deferredTaskHandler().clearHandlersToBeDeleted(); 154 deferredTaskHandler().clearHandlersToBeDeleted();
154 m_isCleared = true; 155 m_isCleared = true;
155 } 156 }
156 157
157 void AbstractAudioContext::uninitialize() 158 void BaseAudioContext::uninitialize()
158 { 159 {
159 ASSERT(isMainThread()); 160 ASSERT(isMainThread());
160 161
161 if (!isDestinationInitialized()) 162 if (!isDestinationInitialized())
162 return; 163 return;
163 164
164 // This stops the audio thread and all audio rendering. 165 // This stops the audio thread and all audio rendering.
165 if (m_destinationNode) 166 if (m_destinationNode)
166 m_destinationNode->handler().uninitialize(); 167 m_destinationNode->handler().uninitialize();
167 168
168 // Get rid of the sources which may still be playing. 169 // Get rid of the sources which may still be playing.
169 releaseActiveSourceNodes(); 170 releaseActiveSourceNodes();
170 171
171 // Reject any pending resolvers before we go away. 172 // Reject any pending resolvers before we go away.
172 rejectPendingResolvers(); 173 rejectPendingResolvers();
173 didClose(); 174 didClose();
174 175
175 ASSERT(m_listener); 176 ASSERT(m_listener);
176 m_listener->waitForHRTFDatabaseLoaderThreadCompletion(); 177 m_listener->waitForHRTFDatabaseLoaderThreadCompletion();
177 178
178 clear(); 179 clear();
179 } 180 }
180 181
181 void AbstractAudioContext::stop() 182 void BaseAudioContext::stop()
182 { 183 {
183 uninitialize(); 184 uninitialize();
184 } 185 }
185 186
186 bool AbstractAudioContext::hasPendingActivity() const 187 bool BaseAudioContext::hasPendingActivity() const
187 { 188 {
188 // There's no pending activity if the audio context has been cleared. 189 // There's no pending activity if the audio context has been cleared.
189 return !m_isCleared; 190 return !m_isCleared;
190 } 191 }
191 192
192 void AbstractAudioContext::throwExceptionForClosedState(ExceptionState& exceptionState) 193 void BaseAudioContext::throwExceptionForClosedState(ExceptionState& exceptionState)
193 { 194 {
194 exceptionState.throwDOMException(InvalidStateError, "AudioContext has been closed."); 195 exceptionState.throwDOMException(InvalidStateError, "AudioContext has been closed.");
195 } 196 }
196 197
197 AudioBuffer* AbstractAudioContext::createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState& exceptionState) 198 AudioBuffer* BaseAudioContext::createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState& exceptionState)
198 { 199 {
199 // It's ok to call createBuffer, even if the context is closed because the AudioBuffer doesn't 200 // It's ok to call createBuffer, even if the context is closed because the AudioBuffer doesn't
200 // really "belong" to any particular context. 201 // really "belong" to any particular context.
201 202
202 return AudioBuffer::create(numberOfChannels, numberOfFrames, sampleRate, exceptionState); 203 return AudioBuffer::create(numberOfChannels, numberOfFrames, sampleRate, exceptionState);
203 } 204 }
204 205
205 ScriptPromise AbstractAudioContext::decodeAudioData(ScriptState* scriptState, DOMArrayBuffer* audioData, AudioBufferCallback* successCallback, AudioBufferCallback* errorCallback, ExceptionState& exceptionState) 206 ScriptPromise BaseAudioContext::decodeAudioData(ScriptState* scriptState, DOMArrayBuffer* audioData, AudioBufferCallback* successCallback, AudioBufferCallback* errorCallback, ExceptionState& exceptionState)
206 { 207 {
207 ASSERT(isMainThread()); 208 ASSERT(isMainThread());
208 ASSERT(audioData); 209 ASSERT(audioData);
209 210
210 ScriptPromiseResolver* resolver = ScriptPromiseResolver::create(scriptState); 211 ScriptPromiseResolver* resolver = ScriptPromiseResolver::create(scriptState);
211 ScriptPromise promise = resolver->promise(); 212 ScriptPromise promise = resolver->promise();
212 213
213 float rate = isContextClosed() ? closedContextSampleRate() : sampleRate(); 214 float rate = isContextClosed() ? closedContextSampleRate() : sampleRate();
214 215
215 ASSERT(rate > 0); 216 ASSERT(rate > 0);
216 217
217 m_decodeAudioResolvers.add(resolver); 218 m_decodeAudioResolvers.add(resolver);
218 m_audioDecoder.decodeAsync(audioData, rate, successCallback, errorCallback, resolver, this); 219 m_audioDecoder.decodeAsync(audioData, rate, successCallback, errorCallback, resolver, this);
219 220
220 return promise; 221 return promise;
221 } 222 }
222 223
223 void AbstractAudioContext::handleDecodeAudioData(AudioBuffer* audioBuffer, ScriptPromiseResolver* resolver, AudioBufferCallback* successCallback, AudioBufferCallback* errorCallback) 224 void BaseAudioContext::handleDecodeAudioData(AudioBuffer* audioBuffer, ScriptPromiseResolver* resolver, AudioBufferCallback* successCallback, AudioBufferCallback* errorCallback)
224 { 225 {
225 ASSERT(isMainThread()); 226 ASSERT(isMainThread());
226 227
227 if (audioBuffer) { 228 if (audioBuffer) {
228 // Resolve promise successfully and run the success callback 229 // Resolve promise successfully and run the success callback
229 resolver->resolve(audioBuffer); 230 resolver->resolve(audioBuffer);
230 if (successCallback) 231 if (successCallback)
231 successCallback->handleEvent(audioBuffer); 232 successCallback->handleEvent(audioBuffer);
232 } else { 233 } else {
233 // Reject the promise and run the error callback 234 // Reject the promise and run the error callback
234 DOMException* error = DOMException::create(EncodingError, "Unable to decode audio data"); 235 DOMException* error = DOMException::create(EncodingError, "Unable to decode audio data");
235 resolver->reject(error); 236 resolver->reject(error);
236 if (errorCallback) 237 if (errorCallback)
237 errorCallback->handleEvent(error); 238 errorCallback->handleEvent(error);
238 } 239 }
239 240
240 // We've resolved the promise. Remove it now. 241 // We've resolved the promise. Remove it now.
241 ASSERT(m_decodeAudioResolvers.contains(resolver)); 242 ASSERT(m_decodeAudioResolvers.contains(resolver));
242 m_decodeAudioResolvers.remove(resolver); 243 m_decodeAudioResolvers.remove(resolver);
243 } 244 }
244 245
245 AudioBufferSourceNode* AbstractAudioContext::createBufferSource(ExceptionState& exceptionState) 246 AudioBufferSourceNode* BaseAudioContext::createBufferSource(ExceptionState& exceptionState)
246 { 247 {
247 ASSERT(isMainThread()); 248 ASSERT(isMainThread());
248 249
249 if (isContextClosed()) { 250 if (isContextClosed()) {
250 throwExceptionForClosedState(exceptionState); 251 throwExceptionForClosedState(exceptionState);
251 return nullptr; 252 return nullptr;
252 } 253 }
253 254
254 AudioBufferSourceNode* node = AudioBufferSourceNode::create(*this, sampleRate()); 255 AudioBufferSourceNode* node = AudioBufferSourceNode::create(*this, sampleRate());
255 256
256 // Do not add a reference to this source node now. The reference will be added when start() is 257 // Do not add a reference to this source node now. The reference will be added when start() is
257 // called. 258 // called.
258 259
259 return node; 260 return node;
260 } 261 }
261 262
262 MediaElementAudioSourceNode* AbstractAudioContext::createMediaElementSource(HTMLMediaElement* mediaElement, ExceptionState& exceptionState) 263 ScriptProcessorNode* BaseAudioContext::createScriptProcessor(ExceptionState& exceptionState)
263 {
264 ASSERT(isMainThread());
265
266 if (isContextClosed()) {
267 throwExceptionForClosedState(exceptionState);
268 return nullptr;
269 }
270
271 // First check if this media element already has a source node.
272 if (mediaElement->audioSourceNode()) {
273 exceptionState.throwDOMException(
274 InvalidStateError,
275 "HTMLMediaElement already connected previously to a different MediaE lementSourceNode.");
276 return nullptr;
277 }
278
279 MediaElementAudioSourceNode* node = MediaElementAudioSourceNode::create(*this, *mediaElement);
280
281 mediaElement->setAudioSourceNode(node);
282
283 notifySourceNodeStartedProcessing(node); // context keeps reference until node is disconnected
284 return node;
285 }
286
287 MediaStreamAudioSourceNode* AbstractAudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionState& exceptionState)
288 {
289 ASSERT(isMainThread());
290
291 if (isContextClosed()) {
292 throwExceptionForClosedState(exceptionState);
293 return nullptr;
294 }
295
296 MediaStreamTrackVector audioTracks = mediaStream->getAudioTracks();
297 if (audioTracks.isEmpty()) {
298 exceptionState.throwDOMException(
299 InvalidStateError,
300 "MediaStream has no audio track");
301 return nullptr;
302 }
303
304 // Use the first audio track in the media stream.
305 MediaStreamTrack* audioTrack = audioTracks[0];
306 OwnPtr<AudioSourceProvider> provider = audioTrack->createWebAudioSource();
307 MediaStreamAudioSourceNode* node = MediaStreamAudioSourceNode::create(*this, *mediaStream, audioTrack, provider.release());
308
309 // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
310 node->setFormat(2, sampleRate());
311
312 notifySourceNodeStartedProcessing(node); // context keeps reference until node is disconnected
313 return node;
314 }
315
316 MediaStreamAudioDestinationNode* AbstractAudioContext::createMediaStreamDestination(ExceptionState& exceptionState)
317 {
318 if (isContextClosed()) {
319 throwExceptionForClosedState(exceptionState);
320 return nullptr;
321 }
322
323 // Set number of output channels to stereo by default.
324 return MediaStreamAudioDestinationNode::create(*this, 2);
325 }
326
327 ScriptProcessorNode* AbstractAudioContext::createScriptProcessor(ExceptionState& exceptionState)
328 { 264 {
329 // Set number of input/output channels to stereo by default. 265 // Set number of input/output channels to stereo by default.
330 return createScriptProcessor(0, 2, 2, exceptionState); 266 return createScriptProcessor(0, 2, 2, exceptionState);
331 } 267 }
332 268
333 ScriptProcessorNode* AbstractAudioContext::createScriptProcessor(size_t bufferSize, ExceptionState& exceptionState) 269 ScriptProcessorNode* BaseAudioContext::createScriptProcessor(size_t bufferSize, ExceptionState& exceptionState)
334 { 270 {
335 // Set number of input/output channels to stereo by default. 271 // Set number of input/output channels to stereo by default.
336 return createScriptProcessor(bufferSize, 2, 2, exceptionState); 272 return createScriptProcessor(bufferSize, 2, 2, exceptionState);
337 } 273 }
338 274
339 ScriptProcessorNode* AbstractAudioContext::createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState& exceptionState) 275 ScriptProcessorNode* BaseAudioContext::createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState& exceptionState)
340 { 276 {
341 // Set number of output channels to stereo by default. 277 // Set number of output channels to stereo by default.
342 return createScriptProcessor(bufferSize, numberOfInputChannels, 2, exceptionState); 278 return createScriptProcessor(bufferSize, numberOfInputChannels, 2, exceptionState);
343 } 279 }
344 280
345 ScriptProcessorNode* AbstractAudioContext::createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState& exceptionState) 281 ScriptProcessorNode* BaseAudioContext::createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState& exceptionState)
346 { 282 {
347 ASSERT(isMainThread()); 283 ASSERT(isMainThread());
348 284
349 if (isContextClosed()) { 285 if (isContextClosed()) {
350 throwExceptionForClosedState(exceptionState); 286 throwExceptionForClosedState(exceptionState);
351 return nullptr; 287 return nullptr;
352 } 288 }
353 289
354 ScriptProcessorNode* node = ScriptProcessorNode::create(*this, sampleRate(), bufferSize, numberOfInputChannels, numberOfOutputChannels); 290 ScriptProcessorNode* node = ScriptProcessorNode::create(*this, sampleRate(), bufferSize, numberOfInputChannels, numberOfOutputChannels);
355 291
356 if (!node) { 292 if (!node) {
357 if (!numberOfInputChannels && !numberOfOutputChannels) { 293 if (!numberOfInputChannels && !numberOfOutputChannels) {
358 exceptionState.throwDOMException( 294 exceptionState.throwDOMException(
359 IndexSizeError, 295 IndexSizeError,
360 "number of input channels and output channels cannot both be zer o."); 296 "number of input channels and output channels cannot both be zer o.");
361 } else if (numberOfInputChannels > AbstractAudioContext::maxNumberOfChannels()) { 297 } else if (numberOfInputChannels > BaseAudioContext::maxNumberOfChannels()) {
362 exceptionState.throwDOMException( 298 exceptionState.throwDOMException(
363 IndexSizeError, 299 IndexSizeError,
364 "number of input channels (" + String::number(numberOfInputChann els) 300 "number of input channels (" + String::number(numberOfInputChann els)
365 + ") exceeds maximum (" 301 + ") exceeds maximum ("
366 + String::number(AbstractAudioContext::maxNumberOfChannels()) + ")."); 302 + String::number(BaseAudioContext::maxNumberOfChannels()) + ").");
367 } else if (numberOfOutputChannels > AbstractAudioContext::maxNumberOfChannels()) { 303 } else if (numberOfOutputChannels > BaseAudioContext::maxNumberOfChannels()) {
368 exceptionState.throwDOMException( 304 exceptionState.throwDOMException(
369 IndexSizeError, 305 IndexSizeError,
370 "number of output channels (" + String::number(numberOfInputChan nels) 306 "number of output channels (" + String::number(numberOfInputChan nels)
371 + ") exceeds maximum (" 307 + ") exceeds maximum ("
372 + String::number(AbstractAudioContext::maxNumberOfChannels()) + ")."); 308 + String::number(BaseAudioContext::maxNumberOfChannels()) + ").");
373 } else { 309 } else {
374 exceptionState.throwDOMException( 310 exceptionState.throwDOMException(
375 IndexSizeError, 311 IndexSizeError,
376 "buffer size (" + String::number(bufferSize) 312 "buffer size (" + String::number(bufferSize)
377 + ") must be a power of two between 256 and 16384."); 313 + ") must be a power of two between 256 and 16384.");
378 } 314 }
379 return nullptr; 315 return nullptr;
380 } 316 }
381 317
382 notifySourceNodeStartedProcessing(node); // context keeps reference until we stop making javascript rendering callbacks 318 notifySourceNodeStartedProcessing(node); // context keeps reference until we stop making javascript rendering callbacks
383 return node; 319 return node;
384 } 320 }
385 321
386 StereoPannerNode* AbstractAudioContext::createStereoPanner(ExceptionState& exceptionState) 322 StereoPannerNode* BaseAudioContext::createStereoPanner(ExceptionState& exceptionState)
387 { 323 {
388 ASSERT(isMainThread()); 324 ASSERT(isMainThread());
389 if (isContextClosed()) { 325 if (isContextClosed()) {
390 throwExceptionForClosedState(exceptionState); 326 throwExceptionForClosedState(exceptionState);
391 return nullptr; 327 return nullptr;
392 } 328 }
393 329
394 return StereoPannerNode::create(*this, sampleRate()); 330 return StereoPannerNode::create(*this, sampleRate());
395 } 331 }
396 332
397 BiquadFilterNode* AbstractAudioContext::createBiquadFilter(ExceptionState& exceptionState) 333 BiquadFilterNode* BaseAudioContext::createBiquadFilter(ExceptionState& exceptionState)
398 { 334 {
399 ASSERT(isMainThread()); 335 ASSERT(isMainThread());
400 if (isContextClosed()) { 336 if (isContextClosed()) {
401 throwExceptionForClosedState(exceptionState); 337 throwExceptionForClosedState(exceptionState);
402 return nullptr; 338 return nullptr;
403 } 339 }
404 340
405 return BiquadFilterNode::create(*this, sampleRate()); 341 return BiquadFilterNode::create(*this, sampleRate());
406 } 342 }
407 343
408 WaveShaperNode* AbstractAudioContext::createWaveShaper(ExceptionState& exceptionState) 344 WaveShaperNode* BaseAudioContext::createWaveShaper(ExceptionState& exceptionState)
409 { 345 {
410 ASSERT(isMainThread()); 346 ASSERT(isMainThread());
411 if (isContextClosed()) { 347 if (isContextClosed()) {
412 throwExceptionForClosedState(exceptionState); 348 throwExceptionForClosedState(exceptionState);
413 return nullptr; 349 return nullptr;
414 } 350 }
415 351
416 return WaveShaperNode::create(*this); 352 return WaveShaperNode::create(*this);
417 } 353 }
418 354
419 PannerNode* AbstractAudioContext::createPanner(ExceptionState& exceptionState) 355 PannerNode* BaseAudioContext::createPanner(ExceptionState& exceptionState)
420 { 356 {
421 ASSERT(isMainThread()); 357 ASSERT(isMainThread());
422 if (isContextClosed()) { 358 if (isContextClosed()) {
423 throwExceptionForClosedState(exceptionState); 359 throwExceptionForClosedState(exceptionState);
424 return nullptr; 360 return nullptr;
425 } 361 }
426 362
427 return PannerNode::create(*this, sampleRate()); 363 return PannerNode::create(*this, sampleRate());
428 } 364 }
429 365
430 ConvolverNode* AbstractAudioContext::createConvolver(ExceptionState& exceptionState) 366 ConvolverNode* BaseAudioContext::createConvolver(ExceptionState& exceptionState)
431 { 367 {
432 ASSERT(isMainThread()); 368 ASSERT(isMainThread());
433 if (isContextClosed()) { 369 if (isContextClosed()) {
434 throwExceptionForClosedState(exceptionState); 370 throwExceptionForClosedState(exceptionState);
435 return nullptr; 371 return nullptr;
436 } 372 }
437 373
438 return ConvolverNode::create(*this, sampleRate()); 374 return ConvolverNode::create(*this, sampleRate());
439 } 375 }
440 376
441 DynamicsCompressorNode* AbstractAudioContext::createDynamicsCompressor(ExceptionState& exceptionState) 377 DynamicsCompressorNode* BaseAudioContext::createDynamicsCompressor(ExceptionState& exceptionState)
442 { 378 {
443 ASSERT(isMainThread()); 379 ASSERT(isMainThread());
444 if (isContextClosed()) { 380 if (isContextClosed()) {
445 throwExceptionForClosedState(exceptionState); 381 throwExceptionForClosedState(exceptionState);
446 return nullptr; 382 return nullptr;
447 } 383 }
448 384
449 return DynamicsCompressorNode::create(*this, sampleRate()); 385 return DynamicsCompressorNode::create(*this, sampleRate());
450 } 386 }
451 387
452 AnalyserNode* AbstractAudioContext::createAnalyser(ExceptionState& exceptionState) 388 AnalyserNode* BaseAudioContext::createAnalyser(ExceptionState& exceptionState)
453 { 389 {
454 ASSERT(isMainThread()); 390 ASSERT(isMainThread());
455 if (isContextClosed()) { 391 if (isContextClosed()) {
456 throwExceptionForClosedState(exceptionState); 392 throwExceptionForClosedState(exceptionState);
457 return nullptr; 393 return nullptr;
458 } 394 }
459 395
460 return AnalyserNode::create(*this, sampleRate()); 396 return AnalyserNode::create(*this, sampleRate());
461 } 397 }
462 398
463 GainNode* AbstractAudioContext::createGain(ExceptionState& exceptionState) 399 GainNode* BaseAudioContext::createGain(ExceptionState& exceptionState)
464 { 400 {
465 ASSERT(isMainThread()); 401 ASSERT(isMainThread());
466 if (isContextClosed()) { 402 if (isContextClosed()) {
467 throwExceptionForClosedState(exceptionState); 403 throwExceptionForClosedState(exceptionState);
468 return nullptr; 404 return nullptr;
469 } 405 }
470 406
471 return GainNode::create(*this, sampleRate()); 407 return GainNode::create(*this, sampleRate());
472 } 408 }
473 409
474 DelayNode* AbstractAudioContext::createDelay(ExceptionState& exceptionState) 410 DelayNode* BaseAudioContext::createDelay(ExceptionState& exceptionState)
475 { 411 {
476 const double defaultMaxDelayTime = 1; 412 const double defaultMaxDelayTime = 1;
477 return createDelay(defaultMaxDelayTime, exceptionState); 413 return createDelay(defaultMaxDelayTime, exceptionState);
478 } 414 }
479 415
480 DelayNode* AbstractAudioContext::createDelay(double maxDelayTime, ExceptionState& exceptionState) 416 DelayNode* BaseAudioContext::createDelay(double maxDelayTime, ExceptionState& exceptionState)
481 { 417 {
482 ASSERT(isMainThread()); 418 ASSERT(isMainThread());
483 if (isContextClosed()) { 419 if (isContextClosed()) {
484 throwExceptionForClosedState(exceptionState); 420 throwExceptionForClosedState(exceptionState);
485 return nullptr; 421 return nullptr;
486 } 422 }
487 423
488 return DelayNode::create(*this, sampleRate(), maxDelayTime, exceptionState); 424 return DelayNode::create(*this, sampleRate(), maxDelayTime, exceptionState);
489 } 425 }
490 426
491 ChannelSplitterNode* AbstractAudioContext::createChannelSplitter(ExceptionState& exceptionState) 427 ChannelSplitterNode* BaseAudioContext::createChannelSplitter(ExceptionState& exceptionState)
492 { 428 {
493 const unsigned ChannelSplitterDefaultNumberOfOutputs = 6; 429 const unsigned ChannelSplitterDefaultNumberOfOutputs = 6;
494 return createChannelSplitter(ChannelSplitterDefaultNumberOfOutputs, exceptionState); 430 return createChannelSplitter(ChannelSplitterDefaultNumberOfOutputs, exceptionState);
495 } 431 }
496 432
497 ChannelSplitterNode* AbstractAudioContext::createChannelSplitter(size_t numberOfOutputs, ExceptionState& exceptionState) 433 ChannelSplitterNode* BaseAudioContext::createChannelSplitter(size_t numberOfOutputs, ExceptionState& exceptionState)
498 { 434 {
499 ASSERT(isMainThread()); 435 ASSERT(isMainThread());
500 436
501 if (isContextClosed()) { 437 if (isContextClosed()) {
502 throwExceptionForClosedState(exceptionState); 438 throwExceptionForClosedState(exceptionState);
503 return nullptr; 439 return nullptr;
504 } 440 }
505 441
506 ChannelSplitterNode* node = ChannelSplitterNode::create(*this, sampleRate(), numberOfOutputs); 442 ChannelSplitterNode* node = ChannelSplitterNode::create(*this, sampleRate(), numberOfOutputs);
507 443
508 if (!node) { 444 if (!node) {
509 exceptionState.throwDOMException( 445 exceptionState.throwDOMException(
510 IndexSizeError, 446 IndexSizeError,
511 "number of outputs (" + String::number(numberOfOutputs) 447 "number of outputs (" + String::number(numberOfOutputs)
512 + ") must be between 1 and " 448 + ") must be between 1 and "
513 + String::number(AbstractAudioContext::maxNumberOfChannels()) + "."); 449 + String::number(BaseAudioContext::maxNumberOfChannels()) + ".");
514 return nullptr; 450 return nullptr;
515 } 451 }
516 452
517 return node; 453 return node;
518 } 454 }
519 455
520 ChannelMergerNode* AbstractAudioContext::createChannelMerger(ExceptionState& exceptionState) 456 ChannelMergerNode* BaseAudioContext::createChannelMerger(ExceptionState& exceptionState)
521 { 457 {
522 const unsigned ChannelMergerDefaultNumberOfInputs = 6; 458 const unsigned ChannelMergerDefaultNumberOfInputs = 6;
523 return createChannelMerger(ChannelMergerDefaultNumberOfInputs, exceptionState); 459 return createChannelMerger(ChannelMergerDefaultNumberOfInputs, exceptionState);
524 } 460 }
525 461
526 ChannelMergerNode* AbstractAudioContext::createChannelMerger(size_t numberOfInputs, ExceptionState& exceptionState) 462 ChannelMergerNode* BaseAudioContext::createChannelMerger(size_t numberOfInputs, ExceptionState& exceptionState)
527 { 463 {
528 ASSERT(isMainThread()); 464 ASSERT(isMainThread());
529 if (isContextClosed()) { 465 if (isContextClosed()) {
530 throwExceptionForClosedState(exceptionState); 466 throwExceptionForClosedState(exceptionState);
531 return nullptr; 467 return nullptr;
532 } 468 }
533 469
534 ChannelMergerNode* node = ChannelMergerNode::create(*this, sampleRate(), numberOfInputs); 470 ChannelMergerNode* node = ChannelMergerNode::create(*this, sampleRate(), numberOfInputs);
535 471
536 if (!node) { 472 if (!node) {
537 exceptionState.throwDOMException( 473 exceptionState.throwDOMException(
538 IndexSizeError, 474 IndexSizeError,
539 ExceptionMessages::indexOutsideRange<size_t>( 475 ExceptionMessages::indexOutsideRange<size_t>(
540 "number of inputs", 476 "number of inputs",
541 numberOfInputs, 477 numberOfInputs,
542 1, 478 1,
543 ExceptionMessages::InclusiveBound, 479 ExceptionMessages::InclusiveBound,
544 AbstractAudioContext::maxNumberOfChannels(), 480 BaseAudioContext::maxNumberOfChannels(),
545 ExceptionMessages::InclusiveBound)); 481 ExceptionMessages::InclusiveBound));
546 return nullptr; 482 return nullptr;
547 } 483 }
548 484
549 return node; 485 return node;
550 } 486 }
551 487
552 OscillatorNode* AbstractAudioContext::createOscillator(ExceptionState& exceptionState) 488 OscillatorNode* BaseAudioContext::createOscillator(ExceptionState& exceptionState)
553 { 489 {
554 ASSERT(isMainThread()); 490 ASSERT(isMainThread());
555 if (isContextClosed()) { 491 if (isContextClosed()) {
556 throwExceptionForClosedState(exceptionState); 492 throwExceptionForClosedState(exceptionState);
557 return nullptr; 493 return nullptr;
558 } 494 }
559 495
560 OscillatorNode* node = OscillatorNode::create(*this, sampleRate()); 496 OscillatorNode* node = OscillatorNode::create(*this, sampleRate());
561 497
562 // Do not add a reference to this source node now. The reference will be added when start() is 498 // Do not add a reference to this source node now. The reference will be added when start() is
563 // called. 499 // called.
564 500
565 return node; 501 return node;
566 } 502 }
567 503
568 PeriodicWave* AbstractAudioContext::createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, ExceptionState& exceptionState) 504 PeriodicWave* BaseAudioContext::createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, ExceptionState& exceptionState)
569 { 505 {
570 return PeriodicWave::create(sampleRate(), real, imag, false); 506 return PeriodicWave::create(sampleRate(), real, imag, false);
571 } 507 }
572 508
573 PeriodicWave* AbstractAudioContext::createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, const Dictionary& options, ExceptionState& exceptionState) 509 PeriodicWave* BaseAudioContext::createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, const Dictionary& options, ExceptionState& exceptionState)
574 { 510 {
575 ASSERT(isMainThread()); 511 ASSERT(isMainThread());
576 512
577 if (isContextClosed()) { 513 if (isContextClosed()) {
578 throwExceptionForClosedState(exceptionState); 514 throwExceptionForClosedState(exceptionState);
579 return nullptr; 515 return nullptr;
580 } 516 }
581 517
582 if (real->length() != imag->length()) { 518 if (real->length() != imag->length()) {
583 exceptionState.throwDOMException( 519 exceptionState.throwDOMException(
584 IndexSizeError, 520 IndexSizeError,
585 "length of real array (" + String::number(real->length()) 521 "length of real array (" + String::number(real->length())
586 + ") and length of imaginary array (" + String::number(imag->length ()) 522 + ") and length of imaginary array (" + String::number(imag->length ())
587 + ") must match."); 523 + ") must match.");
588 return nullptr; 524 return nullptr;
589 } 525 }
590 526
591 bool isNormalizationDisabled = false; 527 bool isNormalizationDisabled = false;
592 DictionaryHelper::getWithUndefinedOrNullCheck(options, "disableNormalization", isNormalizationDisabled); 528 DictionaryHelper::getWithUndefinedOrNullCheck(options, "disableNormalization", isNormalizationDisabled);
593 529
594 return PeriodicWave::create(sampleRate(), real, imag, isNormalizationDisabled); 530 return PeriodicWave::create(sampleRate(), real, imag, isNormalizationDisabled);
595 } 531 }
596 532
597 IIRFilterNode* AbstractAudioContext::createIIRFilter(Vector<double> feedforwardCoef, Vector<double> feedbackCoef, ExceptionState& exceptionState) 533 IIRFilterNode* BaseAudioContext::createIIRFilter(Vector<double> feedforwardCoef, Vector<double> feedbackCoef, ExceptionState& exceptionState)
598 { 534 {
599 ASSERT(isMainThread()); 535 ASSERT(isMainThread());
600 536
601 if (isContextClosed()) { 537 if (isContextClosed()) {
602 throwExceptionForClosedState(exceptionState); 538 throwExceptionForClosedState(exceptionState);
603 return nullptr; 539 return nullptr;
604 } 540 }
605 541
606 if (feedbackCoef.size() == 0 || (feedbackCoef.size() > IIRFilter::kMaxOrder + 1)) { 542 if (feedbackCoef.size() == 0 || (feedbackCoef.size() > IIRFilter::kMaxOrder + 1)) {
607 exceptionState.throwDOMException( 543 exceptionState.throwDOMException(
(...skipping 63 matching lines...)
671 exceptionState.throwDOMException( 607 exceptionState.throwDOMException(
672 InvalidStateError, 608 InvalidStateError,
673 ExceptionMessages::notAFiniteNumber(c, name.ascii().data())); 609 ExceptionMessages::notAFiniteNumber(c, name.ascii().data()));
674 return nullptr; 610 return nullptr;
675 } 611 }
676 } 612 }
677 613
678 return IIRFilterNode::create(*this, sampleRate(), feedforwardCoef, feedbackCoef); 614 return IIRFilterNode::create(*this, sampleRate(), feedforwardCoef, feedbackCoef);
679 } 615 }
680 616
681 PeriodicWave* AbstractAudioContext::periodicWave(int type) 617 PeriodicWave* BaseAudioContext::periodicWave(int type)
682 { 618 {
683 switch (type) { 619 switch (type) {
684 case OscillatorHandler::SINE: 620 case OscillatorHandler::SINE:
685 // Initialize the table if necessary 621 // Initialize the table if necessary
686 if (!m_periodicWaveSine) 622 if (!m_periodicWaveSine)
687 m_periodicWaveSine = PeriodicWave::createSine(sampleRate()); 623 m_periodicWaveSine = PeriodicWave::createSine(sampleRate());
688 return m_periodicWaveSine; 624 return m_periodicWaveSine;
689 case OscillatorHandler::SQUARE: 625 case OscillatorHandler::SQUARE:
690 // Initialize the table if necessary 626 // Initialize the table if necessary
691 if (!m_periodicWaveSquare) 627 if (!m_periodicWaveSquare)
692 m_periodicWaveSquare = PeriodicWave::createSquare(sampleRate()); 628 m_periodicWaveSquare = PeriodicWave::createSquare(sampleRate());
693 return m_periodicWaveSquare; 629 return m_periodicWaveSquare;
694 case OscillatorHandler::SAWTOOTH: 630 case OscillatorHandler::SAWTOOTH:
695 // Initialize the table if necessary 631 // Initialize the table if necessary
696 if (!m_periodicWaveSawtooth) 632 if (!m_periodicWaveSawtooth)
697 m_periodicWaveSawtooth = PeriodicWave::createSawtooth(sampleRate()); 633 m_periodicWaveSawtooth = PeriodicWave::createSawtooth(sampleRate());
698 return m_periodicWaveSawtooth; 634 return m_periodicWaveSawtooth;
699 case OscillatorHandler::TRIANGLE: 635 case OscillatorHandler::TRIANGLE:
700 // Initialize the table if necessary 636 // Initialize the table if necessary
701 if (!m_periodicWaveTriangle) 637 if (!m_periodicWaveTriangle)
702 m_periodicWaveTriangle = PeriodicWave::createTriangle(sampleRate()); 638 m_periodicWaveTriangle = PeriodicWave::createTriangle(sampleRate());
703 return m_periodicWaveTriangle; 639 return m_periodicWaveTriangle;
704 default: 640 default:
705 ASSERT_NOT_REACHED(); 641 ASSERT_NOT_REACHED();
706 return nullptr; 642 return nullptr;
707 } 643 }
708 } 644 }
709 645
710 String AbstractAudioContext::state() const 646 String BaseAudioContext::state() const
711 { 647 {
712 // These strings had better match the strings for AudioContextState in AudioContext.idl. 648 // These strings had better match the strings for AudioContextState in AudioContext.idl.
713 switch (m_contextState) { 649 switch (m_contextState) {
714 case Suspended: 650 case Suspended:
715 return "suspended"; 651 return "suspended";
716 case Running: 652 case Running:
717 return "running"; 653 return "running";
718 case Closed: 654 case Closed:
719 return "closed"; 655 return "closed";
720 } 656 }
721 ASSERT_NOT_REACHED(); 657 ASSERT_NOT_REACHED();
722 return ""; 658 return "";
723 } 659 }
724 660
725 void AbstractAudioContext::setContextState(AudioContextState newState) 661 void BaseAudioContext::setContextState(AudioContextState newState)
726 { 662 {
727 ASSERT(isMainThread()); 663 ASSERT(isMainThread());
728 664
729 // Validate the transitions. The valid transitions are Suspended->Running, Running->Suspended, 665 // Validate the transitions. The valid transitions are Suspended->Running, Running->Suspended,
730 // and anything->Closed. 666 // and anything->Closed.
731 switch (newState) { 667 switch (newState) {
732 case Suspended: 668 case Suspended:
733 ASSERT(m_contextState == Running); 669 ASSERT(m_contextState == Running);
734 break; 670 break;
735 case Running: 671 case Running:
736 ASSERT(m_contextState == Suspended); 672 ASSERT(m_contextState == Suspended);
737 break; 673 break;
738 case Closed: 674 case Closed:
739 ASSERT(m_contextState != Closed); 675 ASSERT(m_contextState != Closed);
740 break; 676 break;
741 } 677 }
742 678
743 if (newState == m_contextState) { 679 if (newState == m_contextState) {
744 // ASSERTs above failed; just return. 680 // ASSERTs above failed; just return.
745 return; 681 return;
746 } 682 }
747 683
748 m_contextState = newState; 684 m_contextState = newState;
749 685
750 // Notify context that state changed 686 // Notify context that state changed
751 if (getExecutionContext()) 687 if (getExecutionContext())
752 getExecutionContext()->postTask(BLINK_FROM_HERE, createSameThreadTask(&AbstractAudioContext::notifyStateChange, this)); 688 getExecutionContext()->postTask(BLINK_FROM_HERE, createSameThreadTask(&BaseAudioContext::notifyStateChange, this));
753 } 689 }
754 690
755 void AbstractAudioContext::notifyStateChange() 691 void BaseAudioContext::notifyStateChange()
756 { 692 {
757 dispatchEvent(Event::create(EventTypeNames::statechange)); 693 dispatchEvent(Event::create(EventTypeNames::statechange));
758 } 694 }
759 695
760 void AbstractAudioContext::notifySourceNodeFinishedProcessing(AudioHandler* handler) 696 void BaseAudioContext::notifySourceNodeFinishedProcessing(AudioHandler* handler)
761 { 697 {
762 ASSERT(isAudioThread()); 698 ASSERT(isAudioThread());
763 m_finishedSourceHandlers.append(handler); 699 m_finishedSourceHandlers.append(handler);
764 } 700 }
765 701
766 void AbstractAudioContext::releaseFinishedSourceNodes() 702 void BaseAudioContext::releaseFinishedSourceNodes()
767 { 703 {
768 ASSERT(isGraphOwner()); 704 ASSERT(isGraphOwner());
769 ASSERT(isAudioThread()); 705 ASSERT(isAudioThread());
770 for (AudioHandler* handler : m_finishedSourceHandlers) { 706 for (AudioHandler* handler : m_finishedSourceHandlers) {
771 for (unsigned i = 0; i < m_activeSourceNodes.size(); ++i) { 707 for (unsigned i = 0; i < m_activeSourceNodes.size(); ++i) {
772 if (handler == &m_activeSourceNodes[i]->handler()) { 708 if (handler == &m_activeSourceNodes[i]->handler()) {
773 handler->breakConnection(); 709 handler->breakConnection();
774 m_activeSourceNodes.remove(i); 710 m_activeSourceNodes.remove(i);
775 break; 711 break;
776 } 712 }
777 } 713 }
778 } 714 }
779 715
780 m_finishedSourceHandlers.clear(); 716 m_finishedSourceHandlers.clear();
781 } 717 }
782 718
783 void AbstractAudioContext::notifySourceNodeStartedProcessing(AudioNode* node) 719 void BaseAudioContext::notifySourceNodeStartedProcessing(AudioNode* node)
784 { 720 {
785 ASSERT(isMainThread()); 721 ASSERT(isMainThread());
786 AutoLocker locker(this); 722 AutoLocker locker(this);
787 723
788 m_activeSourceNodes.append(node); 724 m_activeSourceNodes.append(node);
789 node->handler().makeConnection(); 725 node->handler().makeConnection();
790 } 726 }
791 727
792 void AbstractAudioContext::releaseActiveSourceNodes() 728 void BaseAudioContext::releaseActiveSourceNodes()
793 { 729 {
794 ASSERT(isMainThread()); 730 ASSERT(isMainThread());
795 for (auto& sourceNode : m_activeSourceNodes) 731 for (auto& sourceNode : m_activeSourceNodes)
796 sourceNode->handler().breakConnection(); 732 sourceNode->handler().breakConnection();
797 733
798 m_activeSourceNodes.clear(); 734 m_activeSourceNodes.clear();
799 } 735 }
800 736
801 void AbstractAudioContext::handleStoppableSourceNodes() 737 void BaseAudioContext::handleStoppableSourceNodes()
802 { 738 {
803 ASSERT(isGraphOwner()); 739 ASSERT(isGraphOwner());
804 740
805 // Find AudioBufferSourceNodes to see if we can stop playing them. 741 // Find AudioBufferSourceNodes to see if we can stop playing them.
806 for (AudioNode* node : m_activeSourceNodes) { 742 for (AudioNode* node : m_activeSourceNodes) {
807 if (node->handler().getNodeType() == AudioHandler::NodeTypeAudioBufferSource) { 743 if (node->handler().getNodeType() == AudioHandler::NodeTypeAudioBufferSource) {
808 AudioBufferSourceNode* sourceNode = static_cast<AudioBufferSourceNode*>(node); 744 AudioBufferSourceNode* sourceNode = static_cast<AudioBufferSourceNode*>(node);
809 sourceNode->audioBufferSourceHandler().handleStoppableSourceNode(); 745 sourceNode->audioBufferSourceHandler().handleStoppableSourceNode();
810 } 746 }
811 } 747 }
812 } 748 }
813 749
814 void AbstractAudioContext::handlePreRenderTasks() 750 void BaseAudioContext::handlePreRenderTasks()
815 { 751 {
816 ASSERT(isAudioThread()); 752 ASSERT(isAudioThread());
817 753
818 // At the beginning of every render quantum, try to update the internal rendering graph state (from main thread changes). 754 // At the beginning of every render quantum, try to update the internal rendering graph state (from main thread changes).
819 // It's OK if the tryLock() fails, we'll just take slightly longer to pick up the changes. 755 // It's OK if the tryLock() fails, we'll just take slightly longer to pick up the changes.
820 if (tryLock()) { 756 if (tryLock()) {
821 deferredTaskHandler().handleDeferredTasks(); 757 deferredTaskHandler().handleDeferredTasks();
822 758
823 resolvePromisesForResume(); 759 resolvePromisesForResume();
824 760
825 // Check to see if source nodes can be stopped because the end time has passed. 761 // Check to see if source nodes can be stopped because the end time has passed.
826 handleStoppableSourceNodes(); 762 handleStoppableSourceNodes();
827 763
828 unlock(); 764 unlock();
829 } 765 }
830 } 766 }
831 767
832 void AbstractAudioContext::handlePostRenderTasks() 768 void BaseAudioContext::handlePostRenderTasks()
833 { 769 {
834 ASSERT(isAudioThread()); 770 ASSERT(isAudioThread());
835 771
836 // Must use a tryLock() here too. Don't worry, the lock will very rarely be contended and this method is called frequently. 772 // Must use a tryLock() here too. Don't worry, the lock will very rarely be contended and this method is called frequently.
837 // The worst that can happen is that there will be some nodes which will take slightly longer than usual to be deleted or removed 773 // The worst that can happen is that there will be some nodes which will take slightly longer than usual to be deleted or removed
838 // from the render graph (in which case they'll render silence). 774 // from the render graph (in which case they'll render silence).
839 if (tryLock()) { 775 if (tryLock()) {
840 // Take care of AudioNode tasks where the tryLock() failed previously. 776 // Take care of AudioNode tasks where the tryLock() failed previously.
841 deferredTaskHandler().breakConnections(); 777 deferredTaskHandler().breakConnections();
842 778
843 // Dynamically clean up nodes which are no longer needed. 779 // Dynamically clean up nodes which are no longer needed.
844 releaseFinishedSourceNodes(); 780 releaseFinishedSourceNodes();
845 781
846 deferredTaskHandler().handleDeferredTasks(); 782 deferredTaskHandler().handleDeferredTasks();
847 deferredTaskHandler().requestToDeleteHandlersOnMainThread(); 783 deferredTaskHandler().requestToDeleteHandlersOnMainThread();
848 784
849 unlock(); 785 unlock();
850 } 786 }
851 } 787 }
852 788
853 void AbstractAudioContext::resolvePromisesForResumeOnMainThread() 789 void BaseAudioContext::resolvePromisesForResumeOnMainThread()
854 { 790 {
855 ASSERT(isMainThread()); 791 ASSERT(isMainThread());
856 AutoLocker locker(this); 792 AutoLocker locker(this);
857 793
858 for (auto& resolver : m_resumeResolvers) { 794 for (auto& resolver : m_resumeResolvers) {
859 if (m_contextState == Closed) { 795 if (m_contextState == Closed) {
860 resolver->reject( 796 resolver->reject(
861 DOMException::create(InvalidStateError, "Cannot resume a context that has been closed")); 797 DOMException::create(InvalidStateError, "Cannot resume a context that has been closed"));
862 } else { 798 } else {
863 resolver->resolve(); 799 resolver->resolve();
864 } 800 }
865 } 801 }
866 802
867 m_resumeResolvers.clear(); 803 m_resumeResolvers.clear();
868 m_isResolvingResumePromises = false; 804 m_isResolvingResumePromises = false;
869 } 805 }
870 806
871 void AbstractAudioContext::resolvePromisesForResume() 807 void BaseAudioContext::resolvePromisesForResume()
872 { 808 {
873 // This runs inside the AbstractAudioContext's lock when handling pre-render tasks. 809 // This runs inside the BaseAudioContext's lock when handling pre-render tasks.
874 ASSERT(isAudioThread()); 810 ASSERT(isAudioThread());
875 ASSERT(isGraphOwner()); 811 ASSERT(isGraphOwner());
876 812
877 // Resolve any pending promises created by resume(). Only do this if we haven't already started 813 // Resolve any pending promises created by resume(). Only do this if we haven't already started
878 // resolving these promises. This gets called very often and it takes some time to resolve the 814 // resolving these promises. This gets called very often and it takes some time to resolve the
879 // promises in the main thread. 815 // promises in the main thread.
880 if (!m_isResolvingResumePromises && m_resumeResolvers.size() > 0) { 816 if (!m_isResolvingResumePromises && m_resumeResolvers.size() > 0) {
881 m_isResolvingResumePromises = true; 817 m_isResolvingResumePromises = true;
882 Platform::current()->mainThread()->getWebTaskRunner()->postTask(BLINK_FROM_HERE, threadSafeBind(&AbstractAudioContext::resolvePromisesForResumeOnMainThread, this)); 818 Platform::current()->mainThread()->getWebTaskRunner()->postTask(BLINK_FROM_HERE, threadSafeBind(&BaseAudioContext::resolvePromisesForResumeOnMainThread, this));
883 } 819 }
884 } 820 }
885 821
886 void AbstractAudioContext::rejectPendingResolvers() 822 void BaseAudioContext::rejectPendingResolvers()
887 { 823 {
888 ASSERT(isMainThread()); 824 ASSERT(isMainThread());
889 825
890 // Audio context is closing down so reject any resume promises that are still pending. 826 // Audio context is closing down so reject any resume promises that are still pending.
891 827
892 for (auto& resolver : m_resumeResolvers) { 828 for (auto& resolver : m_resumeResolvers) {
893 resolver->reject(DOMException::create(InvalidStateError, "Audio context is going away")); 829 resolver->reject(DOMException::create(InvalidStateError, "Audio context is going away"));
894 } 830 }
895 m_resumeResolvers.clear(); 831 m_resumeResolvers.clear();
896 m_isResolvingResumePromises = false; 832 m_isResolvingResumePromises = false;
897 833
898 // Now reject any pending decodeAudioData resolvers 834 // Now reject any pending decodeAudioData resolvers
899 for (auto& resolver : m_decodeAudioResolvers) 835 for (auto& resolver : m_decodeAudioResolvers)
900 resolver->reject(DOMException::create(InvalidStateError, "Audio context is going away")); 836 resolver->reject(DOMException::create(InvalidStateError, "Audio context is going away"));
901 m_decodeAudioResolvers.clear(); 837 m_decodeAudioResolvers.clear();
902 } 838 }
903 839
904 const AtomicString& AbstractAudioContext::interfaceName() const 840 const AtomicString& BaseAudioContext::interfaceName() const
905 { 841 {
906 return EventTargetNames::AudioContext; 842 return EventTargetNames::AudioContext;
907 } 843 }
908 844
909 ExecutionContext* AbstractAudioContext::getExecutionContext() const 845 ExecutionContext* BaseAudioContext::getExecutionContext() const
910 { 846 {
911 return ActiveDOMObject::getExecutionContext(); 847 return ActiveDOMObject::getExecutionContext();
912 } 848 }
913 849
914 void AbstractAudioContext::startRendering() 850 void BaseAudioContext::startRendering()
915 { 851 {
916 // This is called for both online and offline contexts. 852 // This is called for both online and offline contexts.
917 ASSERT(isMainThread()); 853 ASSERT(isMainThread());
918 ASSERT(m_destinationNode); 854 ASSERT(m_destinationNode);
919 855
920 if (m_contextState == Suspended) { 856 if (m_contextState == Suspended) {
921 destination()->audioDestinationHandler().startRendering(); 857 destination()->audioDestinationHandler().startRendering();
922 setContextState(Running); 858 setContextState(Running);
923 } 859 }
924 } 860 }
925 861
926 DEFINE_TRACE(AbstractAudioContext) 862 DEFINE_TRACE(BaseAudioContext)
927 { 863 {
928 visitor->trace(m_destinationNode); 864 visitor->trace(m_destinationNode);
929 visitor->trace(m_listener); 865 visitor->trace(m_listener);
930 // trace() can be called in AbstractAudioContext constructor, and 866 // trace() can be called in BaseAudioContext constructor, and
931 // m_contextGraphMutex might be unavailable. 867 // m_contextGraphMutex might be unavailable.
932 if (m_didInitializeContextGraphMutex) { 868 if (m_didInitializeContextGraphMutex) {
933 AutoLocker lock(this); 869 AutoLocker lock(this);
934 visitor->trace(m_activeSourceNodes); 870 visitor->trace(m_activeSourceNodes);
935 } else { 871 } else {
936 visitor->trace(m_activeSourceNodes); 872 visitor->trace(m_activeSourceNodes);
937 } 873 }
938 visitor->trace(m_resumeResolvers); 874 visitor->trace(m_resumeResolvers);
939 visitor->trace(m_decodeAudioResolvers); 875 visitor->trace(m_decodeAudioResolvers);
940 876
941 visitor->trace(m_periodicWaveSine); 877 visitor->trace(m_periodicWaveSine);
942 visitor->trace(m_periodicWaveSquare); 878 visitor->trace(m_periodicWaveSquare);
943 visitor->trace(m_periodicWaveSawtooth); 879 visitor->trace(m_periodicWaveSawtooth);
944 visitor->trace(m_periodicWaveTriangle); 880 visitor->trace(m_periodicWaveTriangle);
945 RefCountedGarbageCollectedEventTargetWithInlineData<AbstractAudioContext>::trace(visitor); 881 RefCountedGarbageCollectedEventTargetWithInlineData<BaseAudioContext>::trace(visitor);
946 ActiveDOMObject::trace(visitor); 882 ActiveDOMObject::trace(visitor);
947 } 883 }
948 884
949 SecurityOrigin* AbstractAudioContext::getSecurityOrigin() const 885 SecurityOrigin* BaseAudioContext::getSecurityOrigin() const
950 { 886 {
951 if (getExecutionContext()) 887 if (getExecutionContext())
952 return getExecutionContext()->getSecurityOrigin(); 888 return getExecutionContext()->getSecurityOrigin();
953 889
954 return nullptr; 890 return nullptr;
955 } 891 }
956 892
957 } // namespace blink 893 } // namespace blink
958 894