Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(385)

Side by Side Diff: third_party/WebKit/Source/modules/webaudio/AbstractAudioContext.cpp

Issue 1865583002: Implement BaseAudioContext (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 /*
2 * Copyright (C) 2010, Google Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND AN Y
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
15 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
16 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR AN Y
17 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
18 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
19 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND O N
20 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
21 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
22 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
23 */
24
25 #include "modules/webaudio/AbstractAudioContext.h"
26 #include "bindings/core/v8/Dictionary.h"
27 #include "bindings/core/v8/ExceptionMessages.h"
28 #include "bindings/core/v8/ExceptionState.h"
29 #include "bindings/core/v8/ScriptPromiseResolver.h"
30 #include "bindings/core/v8/ScriptState.h"
31 #include "core/dom/DOMException.h"
32 #include "core/dom/Document.h"
33 #include "core/dom/ExceptionCode.h"
34 #include "core/dom/ExecutionContextTask.h"
35 #include "core/html/HTMLMediaElement.h"
36 #include "modules/mediastream/MediaStream.h"
37 #include "modules/webaudio/AnalyserNode.h"
38 #include "modules/webaudio/AudioBuffer.h"
39 #include "modules/webaudio/AudioBufferCallback.h"
40 #include "modules/webaudio/AudioBufferSourceNode.h"
41 #include "modules/webaudio/AudioContext.h"
42 #include "modules/webaudio/AudioListener.h"
43 #include "modules/webaudio/AudioNodeInput.h"
44 #include "modules/webaudio/AudioNodeOutput.h"
45 #include "modules/webaudio/BiquadFilterNode.h"
46 #include "modules/webaudio/ChannelMergerNode.h"
47 #include "modules/webaudio/ChannelSplitterNode.h"
48 #include "modules/webaudio/ConvolverNode.h"
49 #include "modules/webaudio/DefaultAudioDestinationNode.h"
50 #include "modules/webaudio/DelayNode.h"
51 #include "modules/webaudio/DynamicsCompressorNode.h"
52 #include "modules/webaudio/GainNode.h"
53 #include "modules/webaudio/IIRFilterNode.h"
54 #include "modules/webaudio/MediaElementAudioSourceNode.h"
55 #include "modules/webaudio/MediaStreamAudioDestinationNode.h"
56 #include "modules/webaudio/MediaStreamAudioSourceNode.h"
57 #include "modules/webaudio/OfflineAudioCompletionEvent.h"
58 #include "modules/webaudio/OfflineAudioContext.h"
59 #include "modules/webaudio/OfflineAudioDestinationNode.h"
60 #include "modules/webaudio/OscillatorNode.h"
61 #include "modules/webaudio/PannerNode.h"
62 #include "modules/webaudio/PeriodicWave.h"
63 #include "modules/webaudio/ScriptProcessorNode.h"
64 #include "modules/webaudio/StereoPannerNode.h"
65 #include "modules/webaudio/WaveShaperNode.h"
66 #include "platform/ThreadSafeFunctional.h"
67 #include "platform/audio/IIRFilter.h"
68 #include "public/platform/Platform.h"
69 #include "wtf/text/WTFString.h"
70
71 namespace blink {
72
// Bindings entry point for "new AudioContext()". Delegates to
// AudioContext::create(), which builds the realtime (hardware-backed)
// concrete subclass.
AbstractAudioContext* AbstractAudioContext::create(Document& document, ExceptionState& exceptionState)
{
    return AudioContext::create(document, exceptionState);
}
77
// FIXME(dominicc): Devolve these constructors to AudioContext
// and OfflineAudioContext respectively.

// Constructor for rendering to the audio hardware.
AbstractAudioContext::AbstractAudioContext(Document* document)
    : ActiveScriptWrappable(this)
    , ActiveDOMObject(document)
    , m_destinationNode(nullptr)
    , m_isCleared(false)
    , m_isResolvingResumePromises(false)
    , m_connectionCount(0)
    , m_didInitializeContextGraphMutex(false)
    , m_deferredTaskHandler(DeferredTaskHandler::create())
    , m_contextState(Suspended)
    , m_closedContextSampleRate(-1)
    , m_periodicWaveSine(nullptr)
    , m_periodicWaveSquare(nullptr)
    , m_periodicWaveSawtooth(nullptr)
    , m_periodicWaveTriangle(nullptr)
{
    // From here on the graph mutex may be locked; trace() consults this flag
    // before attempting to take it.
    m_didInitializeContextGraphMutex = true;
    // Unlike the offline constructor below, the realtime context gets a
    // destination for the default audio device and initializes immediately.
    m_destinationNode = DefaultAudioDestinationNode::create(this);

    initialize();
}
103
// Constructor for offline (non-realtime) rendering.
// NOTE(review): the numberOfChannels/numberOfFrames/sampleRate arguments are
// not used in this body — presumably the OfflineAudioContext subclass
// consumes them and supplies the destination node; verify against callers.
AbstractAudioContext::AbstractAudioContext(Document* document, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate)
    : ActiveScriptWrappable(this)
    , ActiveDOMObject(document)
    , m_destinationNode(nullptr)
    , m_isCleared(false)
    , m_isResolvingResumePromises(false)
    , m_connectionCount(0)
    , m_didInitializeContextGraphMutex(false)
    , m_deferredTaskHandler(DeferredTaskHandler::create())
    , m_contextState(Suspended)
    , m_closedContextSampleRate(-1)
    , m_periodicWaveSine(nullptr)
    , m_periodicWaveSquare(nullptr)
    , m_periodicWaveSawtooth(nullptr)
    , m_periodicWaveTriangle(nullptr)
{
    // From here on the graph mutex may be locked; trace() consults this flag
    // before attempting to take it. No initialize() here — the subclass
    // finishes construction.
    m_didInitializeContextGraphMutex = true;
}
123
AbstractAudioContext::~AbstractAudioContext()
{
    // Tell the deferred task handler this context is going away so it stops
    // referring to it.
    deferredTaskHandler().contextWillBeDestroyed();
    // AudioNodes keep a reference to their context, so there should be no way to be in the destructor if there are still AudioNodes around.
    ASSERT(!isDestinationInitialized());
    ASSERT(!m_activeSourceNodes.size());
    ASSERT(!m_finishedSourceHandlers.size());
    ASSERT(!m_isResolvingResumePromises);
    ASSERT(!m_resumeResolvers.size());
}
134
135 void AbstractAudioContext::initialize()
136 {
137 if (isDestinationInitialized())
138 return;
139
140 FFTFrame::initialize();
141 m_listener = AudioListener::create();
142
143 if (m_destinationNode.get()) {
144 m_destinationNode->handler().initialize();
145 }
146 }
147
// Final teardown step: drop the destination and flag the context as cleared
// (which makes hasPendingActivity() return false).
void AbstractAudioContext::clear()
{
    m_destinationNode.clear();
    // The audio rendering thread is dead. Nobody will schedule AudioHandler
    // deletion. Let's do it ourselves.
    deferredTaskHandler().clearHandlersToBeDeleted();
    m_isCleared = true;
}
156
// Shut the context down: stop rendering, release sources, settle pending
// promises, then clear. Safe to call on an uninitialized context.
void AbstractAudioContext::uninitialize()
{
    ASSERT(isMainThread());

    if (!isDestinationInitialized())
        return;

    // This stops the audio thread and all audio rendering.
    if (m_destinationNode)
        m_destinationNode->handler().uninitialize();

    // Get rid of the sources which may still be playing.
    releaseActiveSourceNodes();

    // Reject any pending resolvers before we go away.
    rejectPendingResolvers();
    didClose();

    // The HRTF loader thread must finish before teardown continues.
    ASSERT(m_listener);
    m_listener->waitForHRTFDatabaseLoaderThreadCompletion();

    clear();
}
180
// ActiveDOMObject hook: the execution context is shutting down, so tear the
// whole audio context down.
void AbstractAudioContext::stop()
{
    uninitialize();
}
185
// Keeps the wrapper (and thus this object) alive while the context is live.
bool AbstractAudioContext::hasPendingActivity() const
{
    // There's no pending activity if the audio context has been cleared.
    return !m_isCleared;
}
191
// Shared helper: the uniform error thrown by factory methods when the
// context has already been closed.
void AbstractAudioContext::throwExceptionForClosedState(ExceptionState& exceptionState)
{
    exceptionState.throwDOMException(InvalidStateError, "AudioContext has been closed.");
}
196
AudioBuffer* AbstractAudioContext::createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState& exceptionState)
{
    // It's ok to call createBuffer, even if the context is closed because the AudioBuffer doesn't
    // really "belong" to any particular context.

    return AudioBuffer::create(numberOfChannels, numberOfFrames, sampleRate, exceptionState);
}
204
// Kick off asynchronous decoding of |audioData|. Returns a promise that is
// settled by handleDecodeAudioData() once the decoder finishes.
ScriptPromise AbstractAudioContext::decodeAudioData(ScriptState* scriptState, DOMArrayBuffer* audioData, AudioBufferCallback* successCallback, AudioBufferCallback* errorCallback, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());
    ASSERT(audioData);

    ScriptPromiseResolver* resolver = ScriptPromiseResolver::create(scriptState);
    ScriptPromise promise = resolver->promise();

    // A closed context no longer has a live sample rate; fall back to the
    // rate it had when it was closed so decoding still works.
    float rate = isContextClosed() ? closedContextSampleRate() : sampleRate();

    ASSERT(rate > 0);

    // Keep the resolver alive until handleDecodeAudioData() settles it.
    m_decodeAudioResolvers.add(resolver);
    m_audioDecoder.decodeAsync(audioData, rate, successCallback, errorCallback, resolver, this);

    return promise;
}
222
// Completion callback for decodeAudioData(): settles the promise and runs
// the matching legacy callback, then releases the resolver.
void AbstractAudioContext::handleDecodeAudioData(AudioBuffer* audioBuffer, ScriptPromiseResolver* resolver, AudioBufferCallback* successCallback, AudioBufferCallback* errorCallback)
{
    ASSERT(isMainThread());

    if (audioBuffer) {
        // Resolve promise successfully and run the success callback
        resolver->resolve(audioBuffer);
        if (successCallback)
            successCallback->handleEvent(audioBuffer);
    } else {
        // Reject the promise and run the error callback
        DOMException* error = DOMException::create(EncodingError, "Unable to decode audio data");
        resolver->reject(error);
        if (errorCallback)
            errorCallback->handleEvent(error);
    }

    // We've resolved the promise. Remove it now.
    ASSERT(m_decodeAudioResolvers.contains(resolver));
    m_decodeAudioResolvers.remove(resolver);
}
244
245 AudioBufferSourceNode* AbstractAudioContext::createBufferSource(ExceptionState& exceptionState)
246 {
247 ASSERT(isMainThread());
248
249 if (isContextClosed()) {
250 throwExceptionForClosedState(exceptionState);
251 return nullptr;
252 }
253
254 AudioBufferSourceNode* node = AudioBufferSourceNode::create(*this, sampleRat e());
255
256 // Do not add a reference to this source node now. The reference will be add ed when start() is
257 // called.
258
259 return node;
260 }
261
262 MediaElementAudioSourceNode* AbstractAudioContext::createMediaElementSource(HTML MediaElement* mediaElement, ExceptionState& exceptionState)
263 {
264 ASSERT(isMainThread());
265
266 if (isContextClosed()) {
267 throwExceptionForClosedState(exceptionState);
268 return nullptr;
269 }
270
271 // First check if this media element already has a source node.
272 if (mediaElement->audioSourceNode()) {
273 exceptionState.throwDOMException(
274 InvalidStateError,
275 "HTMLMediaElement already connected previously to a different MediaE lementSourceNode.");
276 return nullptr;
277 }
278
279 MediaElementAudioSourceNode* node = MediaElementAudioSourceNode::create(*thi s, *mediaElement);
280
281 mediaElement->setAudioSourceNode(node);
282
283 notifySourceNodeStartedProcessing(node); // context keeps reference until no de is disconnected
284 return node;
285 }
286
// Factory for MediaStreamAudioSourceNode. Requires an open context and a
// stream with at least one audio track; only the first track is used.
MediaStreamAudioSourceNode* AbstractAudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    MediaStreamTrackVector audioTracks = mediaStream->getAudioTracks();
    if (audioTracks.isEmpty()) {
        exceptionState.throwDOMException(
            InvalidStateError,
            "MediaStream has no audio track");
        return nullptr;
    }

    // Use the first audio track in the media stream.
    MediaStreamTrack* audioTrack = audioTracks[0];
    // Ownership of the provider is transferred into the node.
    OwnPtr<AudioSourceProvider> provider = audioTrack->createWebAudioSource();
    MediaStreamAudioSourceNode* node = MediaStreamAudioSourceNode::create(*this, *mediaStream, audioTrack, provider.release());

    // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
    node->setFormat(2, sampleRate());

    notifySourceNodeStartedProcessing(node); // context keeps reference until node is disconnected
    return node;
}
315
316 MediaStreamAudioDestinationNode* AbstractAudioContext::createMediaStreamDestinat ion(ExceptionState& exceptionState)
317 {
318 if (isContextClosed()) {
319 throwExceptionForClosedState(exceptionState);
320 return nullptr;
321 }
322
323 // Set number of output channels to stereo by default.
324 return MediaStreamAudioDestinationNode::create(*this, 2);
325 }
326
// Overload with all defaults: implementation-chosen buffer size, stereo in,
// stereo out.
ScriptProcessorNode* AbstractAudioContext::createScriptProcessor(ExceptionState& exceptionState)
{
    // Set number of input/output channels to stereo by default.
    return createScriptProcessor(0, 2, 2, exceptionState);
}
332
// Overload with an explicit buffer size; stereo in, stereo out.
ScriptProcessorNode* AbstractAudioContext::createScriptProcessor(size_t bufferSize, ExceptionState& exceptionState)
{
    // Set number of input/output channels to stereo by default.
    return createScriptProcessor(bufferSize, 2, 2, exceptionState);
}
338
// Overload with buffer size and input channel count; stereo out.
ScriptProcessorNode* AbstractAudioContext::createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState& exceptionState)
{
    // Set number of output channels to stereo by default.
    return createScriptProcessor(bufferSize, numberOfInputChannels, 2, exceptionState);
}
344
345 ScriptProcessorNode* AbstractAudioContext::createScriptProcessor(size_t bufferSi ze, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState& exceptionState)
346 {
347 ASSERT(isMainThread());
348
349 if (isContextClosed()) {
350 throwExceptionForClosedState(exceptionState);
351 return nullptr;
352 }
353
354 ScriptProcessorNode* node = ScriptProcessorNode::create(*this, sampleRate(), bufferSize, numberOfInputChannels, numberOfOutputChannels);
355
356 if (!node) {
357 if (!numberOfInputChannels && !numberOfOutputChannels) {
358 exceptionState.throwDOMException(
359 IndexSizeError,
360 "number of input channels and output channels cannot both be zer o.");
361 } else if (numberOfInputChannels > AbstractAudioContext::maxNumberOfChan nels()) {
362 exceptionState.throwDOMException(
363 IndexSizeError,
364 "number of input channels (" + String::number(numberOfInputChann els)
365 + ") exceeds maximum ("
366 + String::number(AbstractAudioContext::maxNumberOfChannels()) + ").");
367 } else if (numberOfOutputChannels > AbstractAudioContext::maxNumberOfCha nnels()) {
368 exceptionState.throwDOMException(
369 IndexSizeError,
370 "number of output channels (" + String::number(numberOfInputChan nels)
371 + ") exceeds maximum ("
372 + String::number(AbstractAudioContext::maxNumberOfChannels()) + ").");
373 } else {
374 exceptionState.throwDOMException(
375 IndexSizeError,
376 "buffer size (" + String::number(bufferSize)
377 + ") must be a power of two between 256 and 16384.");
378 }
379 return nullptr;
380 }
381
382 notifySourceNodeStartedProcessing(node); // context keeps reference until we stop making javascript rendering callbacks
383 return node;
384 }
385
386 StereoPannerNode* AbstractAudioContext::createStereoPanner(ExceptionState& excep tionState)
387 {
388 ASSERT(isMainThread());
389 if (isContextClosed()) {
390 throwExceptionForClosedState(exceptionState);
391 return nullptr;
392 }
393
394 return StereoPannerNode::create(*this, sampleRate());
395 }
396
397 BiquadFilterNode* AbstractAudioContext::createBiquadFilter(ExceptionState& excep tionState)
398 {
399 ASSERT(isMainThread());
400 if (isContextClosed()) {
401 throwExceptionForClosedState(exceptionState);
402 return nullptr;
403 }
404
405 return BiquadFilterNode::create(*this, sampleRate());
406 }
407
408 WaveShaperNode* AbstractAudioContext::createWaveShaper(ExceptionState& exception State)
409 {
410 ASSERT(isMainThread());
411 if (isContextClosed()) {
412 throwExceptionForClosedState(exceptionState);
413 return nullptr;
414 }
415
416 return WaveShaperNode::create(*this);
417 }
418
419 PannerNode* AbstractAudioContext::createPanner(ExceptionState& exceptionState)
420 {
421 ASSERT(isMainThread());
422 if (isContextClosed()) {
423 throwExceptionForClosedState(exceptionState);
424 return nullptr;
425 }
426
427 return PannerNode::create(*this, sampleRate());
428 }
429
430 ConvolverNode* AbstractAudioContext::createConvolver(ExceptionState& exceptionSt ate)
431 {
432 ASSERT(isMainThread());
433 if (isContextClosed()) {
434 throwExceptionForClosedState(exceptionState);
435 return nullptr;
436 }
437
438 return ConvolverNode::create(*this, sampleRate());
439 }
440
441 DynamicsCompressorNode* AbstractAudioContext::createDynamicsCompressor(Exception State& exceptionState)
442 {
443 ASSERT(isMainThread());
444 if (isContextClosed()) {
445 throwExceptionForClosedState(exceptionState);
446 return nullptr;
447 }
448
449 return DynamicsCompressorNode::create(*this, sampleRate());
450 }
451
452 AnalyserNode* AbstractAudioContext::createAnalyser(ExceptionState& exceptionStat e)
453 {
454 ASSERT(isMainThread());
455 if (isContextClosed()) {
456 throwExceptionForClosedState(exceptionState);
457 return nullptr;
458 }
459
460 return AnalyserNode::create(*this, sampleRate());
461 }
462
463 GainNode* AbstractAudioContext::createGain(ExceptionState& exceptionState)
464 {
465 ASSERT(isMainThread());
466 if (isContextClosed()) {
467 throwExceptionForClosedState(exceptionState);
468 return nullptr;
469 }
470
471 return GainNode::create(*this, sampleRate());
472 }
473
// Overload with the spec default maximum delay of 1 second.
DelayNode* AbstractAudioContext::createDelay(ExceptionState& exceptionState)
{
    const double defaultMaxDelayTime = 1;
    return createDelay(defaultMaxDelayTime, exceptionState);
}
479
480 DelayNode* AbstractAudioContext::createDelay(double maxDelayTime, ExceptionState & exceptionState)
481 {
482 ASSERT(isMainThread());
483 if (isContextClosed()) {
484 throwExceptionForClosedState(exceptionState);
485 return nullptr;
486 }
487
488 return DelayNode::create(*this, sampleRate(), maxDelayTime, exceptionState);
489 }
490
// Overload with the spec default of 6 outputs.
ChannelSplitterNode* AbstractAudioContext::createChannelSplitter(ExceptionState& exceptionState)
{
    const unsigned ChannelSplitterDefaultNumberOfOutputs = 6;
    return createChannelSplitter(ChannelSplitterDefaultNumberOfOutputs, exceptionState);
}
496
497 ChannelSplitterNode* AbstractAudioContext::createChannelSplitter(size_t numberOf Outputs, ExceptionState& exceptionState)
498 {
499 ASSERT(isMainThread());
500
501 if (isContextClosed()) {
502 throwExceptionForClosedState(exceptionState);
503 return nullptr;
504 }
505
506 ChannelSplitterNode* node = ChannelSplitterNode::create(*this, sampleRate(), numberOfOutputs);
507
508 if (!node) {
509 exceptionState.throwDOMException(
510 IndexSizeError,
511 "number of outputs (" + String::number(numberOfOutputs)
512 + ") must be between 1 and "
513 + String::number(AbstractAudioContext::maxNumberOfChannels()) + ".") ;
514 return nullptr;
515 }
516
517 return node;
518 }
519
// Overload with the spec default of 6 inputs.
ChannelMergerNode* AbstractAudioContext::createChannelMerger(ExceptionState& exceptionState)
{
    const unsigned ChannelMergerDefaultNumberOfInputs = 6;
    return createChannelMerger(ChannelMergerDefaultNumberOfInputs, exceptionState);
}
525
526 ChannelMergerNode* AbstractAudioContext::createChannelMerger(size_t numberOfInpu ts, ExceptionState& exceptionState)
527 {
528 ASSERT(isMainThread());
529 if (isContextClosed()) {
530 throwExceptionForClosedState(exceptionState);
531 return nullptr;
532 }
533
534 ChannelMergerNode* node = ChannelMergerNode::create(*this, sampleRate(), num berOfInputs);
535
536 if (!node) {
537 exceptionState.throwDOMException(
538 IndexSizeError,
539 ExceptionMessages::indexOutsideRange<size_t>(
540 "number of inputs",
541 numberOfInputs,
542 1,
543 ExceptionMessages::InclusiveBound,
544 AbstractAudioContext::maxNumberOfChannels(),
545 ExceptionMessages::InclusiveBound));
546 return nullptr;
547 }
548
549 return node;
550 }
551
552 OscillatorNode* AbstractAudioContext::createOscillator(ExceptionState& exception State)
553 {
554 ASSERT(isMainThread());
555 if (isContextClosed()) {
556 throwExceptionForClosedState(exceptionState);
557 return nullptr;
558 }
559
560 OscillatorNode* node = OscillatorNode::create(*this, sampleRate());
561
562 // Do not add a reference to this source node now. The reference will be add ed when start() is
563 // called.
564
565 return node;
566 }
567
// Overload without options: normalization is enabled (last argument false
// means "do not disable normalization"). NOTE(review): unlike the options
// overload below, this one performs no closed-context or length checks.
PeriodicWave* AbstractAudioContext::createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, ExceptionState& exceptionState)
{
    return PeriodicWave::create(sampleRate(), real, imag, false);
}
572
// Overload with a constraints dictionary; honors "disableNormalization".
// The real and imaginary coefficient arrays must be the same length.
PeriodicWave* AbstractAudioContext::createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, const Dictionary& options, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    if (real->length() != imag->length()) {
        exceptionState.throwDOMException(
            IndexSizeError,
            "length of real array (" + String::number(real->length())
            + ") and length of imaginary array (" + String::number(imag->length())
            + ") must match.");
        return nullptr;
    }

    // Absent/undefined/null leaves the flag false, i.e. normalization stays on.
    bool isNormalizationDisabled = false;
    DictionaryHelper::getWithUndefinedOrNullCheck(options, "disableNormalization", isNormalizationDisabled);

    return PeriodicWave::create(sampleRate(), real, imag, isNormalizationDisabled);
}
596
// Factory for IIRFilterNode. Validates both coefficient vectors before
// constructing the node:
//   - each must have 1..kMaxOrder+1 entries (NotSupportedError otherwise),
//   - feedbackCoef[0] must be non-zero (it is the normalizing divisor),
//   - at least one feedforward coefficient must be non-zero,
//   - every coefficient must be finite.
IIRFilterNode* AbstractAudioContext::createIIRFilter(Vector<double> feedforwardCoef, Vector<double> feedbackCoef, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    if (feedbackCoef.size() == 0 || (feedbackCoef.size() > IIRFilter::kMaxOrder + 1)) {
        exceptionState.throwDOMException(
            NotSupportedError,
            ExceptionMessages::indexOutsideRange<size_t>(
                "number of feedback coefficients",
                feedbackCoef.size(),
                1,
                ExceptionMessages::InclusiveBound,
                IIRFilter::kMaxOrder + 1,
                ExceptionMessages::InclusiveBound));
        return nullptr;
    }

    if (feedforwardCoef.size() == 0 || (feedforwardCoef.size() > IIRFilter::kMaxOrder + 1)) {
        exceptionState.throwDOMException(
            NotSupportedError,
            ExceptionMessages::indexOutsideRange<size_t>(
                "number of feedforward coefficients",
                feedforwardCoef.size(),
                1,
                ExceptionMessages::InclusiveBound,
                IIRFilter::kMaxOrder + 1,
                ExceptionMessages::InclusiveBound));
        return nullptr;
    }

    if (feedbackCoef[0] == 0) {
        exceptionState.throwDOMException(
            InvalidStateError,
            "First feedback coefficient cannot be zero.");
        return nullptr;
    }

    // An all-zero feedforward vector would produce a filter with no output.
    bool hasNonZeroCoef = false;

    for (size_t k = 0; k < feedforwardCoef.size(); ++k) {
        if (feedforwardCoef[k] != 0) {
            hasNonZeroCoef = true;
            break;
        }
    }

    if (!hasNonZeroCoef) {
        exceptionState.throwDOMException(
            InvalidStateError,
            "At least one feedforward coefficient must be non-zero.");
        return nullptr;
    }

    // Make sure all coefficents are finite.
    for (size_t k = 0; k < feedforwardCoef.size(); ++k) {
        double c = feedforwardCoef[k];
        if (!std::isfinite(c)) {
            String name = "feedforward coefficient " + String::number(k);
            exceptionState.throwDOMException(
                InvalidStateError,
                ExceptionMessages::notAFiniteNumber(c, name.ascii().data()));
            return nullptr;
        }
    }

    for (size_t k = 0; k < feedbackCoef.size(); ++k) {
        double c = feedbackCoef[k];
        if (!std::isfinite(c)) {
            String name = "feedback coefficient " + String::number(k);
            exceptionState.throwDOMException(
                InvalidStateError,
                ExceptionMessages::notAFiniteNumber(c, name.ascii().data()));
            return nullptr;
        }
    }

    return IIRFilterNode::create(*this, sampleRate(), feedforwardCoef, feedbackCoef);
}
680
// Returns the lazily-built, cached PeriodicWave table for one of the four
// standard oscillator shapes. |type| is an OscillatorHandler enum value.
PeriodicWave* AbstractAudioContext::periodicWave(int type)
{
    switch (type) {
    case OscillatorHandler::SINE:
        // Initialize the table if necessary
        if (!m_periodicWaveSine)
            m_periodicWaveSine = PeriodicWave::createSine(sampleRate());
        return m_periodicWaveSine;
    case OscillatorHandler::SQUARE:
        // Initialize the table if necessary
        if (!m_periodicWaveSquare)
            m_periodicWaveSquare = PeriodicWave::createSquare(sampleRate());
        return m_periodicWaveSquare;
    case OscillatorHandler::SAWTOOTH:
        // Initialize the table if necessary
        if (!m_periodicWaveSawtooth)
            m_periodicWaveSawtooth = PeriodicWave::createSawtooth(sampleRate());
        return m_periodicWaveSawtooth;
    case OscillatorHandler::TRIANGLE:
        // Initialize the table if necessary
        if (!m_periodicWaveTriangle)
            m_periodicWaveTriangle = PeriodicWave::createTriangle(sampleRate());
        return m_periodicWaveTriangle;
    default:
        ASSERT_NOT_REACHED();
        return nullptr;
    }
}
709
// IDL attribute getter: the context's lifecycle state as a string.
String AbstractAudioContext::state() const
{
    // These strings had better match the strings for AudioContextState in AudioContext.idl.
    switch (m_contextState) {
    case Suspended:
        return "suspended";
    case Running:
        return "running";
    case Closed:
        return "closed";
    }
    ASSERT_NOT_REACHED();
    return "";
}
724
// Transitions the context lifecycle state and fires "statechange"
// asynchronously. In release builds an invalid same-state transition is
// silently dropped (the ASSERTs only fire in debug).
void AbstractAudioContext::setContextState(AudioContextState newState)
{
    ASSERT(isMainThread());

    // Validate the transitions. The valid transitions are Suspended->Running, Running->Suspended,
    // and anything->Closed.
    switch (newState) {
    case Suspended:
        ASSERT(m_contextState == Running);
        break;
    case Running:
        ASSERT(m_contextState == Suspended);
        break;
    case Closed:
        ASSERT(m_contextState != Closed);
        break;
    }

    if (newState == m_contextState) {
        // ASSERTs above failed; just return.
        return;
    }

    m_contextState = newState;

    // Notify context that state changed
    if (getExecutionContext())
        getExecutionContext()->postTask(BLINK_FROM_HERE, createSameThreadTask(&AbstractAudioContext::notifyStateChange, this));
}
754
// Posted by setContextState(); fires the "statechange" event on this target.
void AbstractAudioContext::notifyStateChange()
{
    dispatchEvent(Event::create(EventTypeNames::statechange));
}
759
// Called on the audio thread when a source handler finishes; queued handlers
// are released later by releaseFinishedSourceNodes().
void AbstractAudioContext::notifySourceNodeFinishedProcessing(AudioHandler* handler)
{
    ASSERT(isAudioThread());
    m_finishedSourceHandlers.append(handler);
}
765
// Runs on the audio thread under the graph lock: for each handler that has
// finished playing, find its owning node, break the connection, and drop the
// node from the active list.
void AbstractAudioContext::releaseFinishedSourceNodes()
{
    ASSERT(isGraphOwner());
    ASSERT(isAudioThread());
    for (AudioHandler* handler : m_finishedSourceHandlers) {
        for (unsigned i = 0; i < m_activeSourceNodes.size(); ++i) {
            if (handler == &m_activeSourceNodes[i]->handler()) {
                handler->breakConnection();
                m_activeSourceNodes.remove(i);
                break;
            }
        }
    }

    m_finishedSourceHandlers.clear();
}
782
// Registers a node as actively playing: the context keeps it in
// m_activeSourceNodes (under the graph lock) until it finishes or the
// context is torn down.
void AbstractAudioContext::notifySourceNodeStartedProcessing(AudioNode* node)
{
    ASSERT(isMainThread());
    AutoLocker locker(this);

    m_activeSourceNodes.append(node);
    node->handler().makeConnection();
}
791
792 void AbstractAudioContext::releaseActiveSourceNodes()
793 {
794 ASSERT(isMainThread());
795 for (auto& sourceNode : m_activeSourceNodes)
796 sourceNode->handler().breakConnection();
797
798 m_activeSourceNodes.clear();
799 }
800
801 void AbstractAudioContext::handleStoppableSourceNodes()
802 {
803 ASSERT(isGraphOwner());
804
805 // Find AudioBufferSourceNodes to see if we can stop playing them.
806 for (AudioNode* node : m_activeSourceNodes) {
807 if (node->handler().getNodeType() == AudioHandler::NodeTypeAudioBufferSo urce) {
808 AudioBufferSourceNode* sourceNode = static_cast<AudioBufferSourceNod e*>(node);
809 sourceNode->audioBufferSourceHandler().handleStoppableSourceNode();
810 }
811 }
812 }
813
// Audio-thread hook run before each render quantum.
void AbstractAudioContext::handlePreRenderTasks()
{
    ASSERT(isAudioThread());

    // At the beginning of every render quantum, try to update the internal rendering graph state (from main thread changes).
    // It's OK if the tryLock() fails, we'll just take slightly longer to pick up the changes.
    if (tryLock()) {
        deferredTaskHandler().handleDeferredTasks();

        resolvePromisesForResume();

        // Check to see if source nodes can be stopped because the end time has passed.
        handleStoppableSourceNodes();

        unlock();
    }
}
831
// Audio-thread hook run after each render quantum.
void AbstractAudioContext::handlePostRenderTasks()
{
    ASSERT(isAudioThread());

    // Must use a tryLock() here too. Don't worry, the lock will very rarely be contended and this method is called frequently.
    // The worst that can happen is that there will be some nodes which will take slightly longer than usual to be deleted or removed
    // from the render graph (in which case they'll render silence).
    if (tryLock()) {
        // Take care of AudioNode tasks where the tryLock() failed previously.
        deferredTaskHandler().breakConnections();

        // Dynamically clean up nodes which are no longer needed.
        releaseFinishedSourceNodes();

        deferredTaskHandler().handleDeferredTasks();
        deferredTaskHandler().requestToDeleteHandlersOnMainThread();

        unlock();
    }
}
852
// Main-thread half of resume-promise resolution, posted from the audio
// thread by resolvePromisesForResume(). Settles every queued resolver.
void AbstractAudioContext::resolvePromisesForResumeOnMainThread()
{
    ASSERT(isMainThread());
    AutoLocker locker(this);

    for (auto& resolver : m_resumeResolvers) {
        if (m_contextState == Closed) {
            // The context closed before the resume could complete.
            resolver->reject(
                DOMException::create(InvalidStateError, "Cannot resume a context that has been closed"));
        } else {
            resolver->resolve();
        }
    }

    m_resumeResolvers.clear();
    m_isResolvingResumePromises = false;
}
870
// Audio-thread half: schedules resume-promise resolution on the main thread
// at most once at a time.
void AbstractAudioContext::resolvePromisesForResume()
{
    // This runs inside the AbstractAudioContext's lock when handling pre-render tasks.
    ASSERT(isAudioThread());
    ASSERT(isGraphOwner());

    // Resolve any pending promises created by resume(). Only do this if we haven't already started
    // resolving these promises. This gets called very often and it takes some time to resolve the
    // promises in the main thread.
    if (!m_isResolvingResumePromises && m_resumeResolvers.size() > 0) {
        m_isResolvingResumePromises = true;
        Platform::current()->mainThread()->getWebTaskRunner()->postTask(BLINK_FROM_HERE, threadSafeBind(&AbstractAudioContext::resolvePromisesForResumeOnMainThread, this));
    }
}
885
// Called from uninitialize(): rejects every outstanding resume() and
// decodeAudioData() promise because the context is shutting down.
void AbstractAudioContext::rejectPendingResolvers()
{
    ASSERT(isMainThread());

    // Audio context is closing down so reject any resume promises that are still pending.

    for (auto& resolver : m_resumeResolvers) {
        resolver->reject(DOMException::create(InvalidStateError, "Audio context is going away"));
    }
    m_resumeResolvers.clear();
    m_isResolvingResumePromises = false;

    // Now reject any pending decodeAudioData resolvers
    for (auto& resolver : m_decodeAudioResolvers)
        resolver->reject(DOMException::create(InvalidStateError, "Audio context is going away"));
    m_decodeAudioResolvers.clear();
}
903
// EventTarget implementation: all contexts report the AudioContext
// interface name.
const AtomicString& AbstractAudioContext::interfaceName() const
{
    return EventTargetNames::AudioContext;
}
908
// Disambiguates the multiply-inherited getExecutionContext() in favor of the
// ActiveDOMObject one.
ExecutionContext* AbstractAudioContext::getExecutionContext() const
{
    return ActiveDOMObject::getExecutionContext();
}
913
914 void AbstractAudioContext::startRendering()
915 {
916 // This is called for both online and offline contexts.
917 ASSERT(isMainThread());
918 ASSERT(m_destinationNode);
919
920 if (m_contextState == Suspended) {
921 destination()->audioDestinationHandler().startRendering();
922 setContextState(Running);
923 }
924 }
925
// Oilpan tracing for garbage-collected members.
DEFINE_TRACE(AbstractAudioContext)
{
    visitor->trace(m_destinationNode);
    visitor->trace(m_listener);
    // trace() can be called in AbstractAudioContext constructor, and
    // m_contextGraphMutex might be unavailable.
    // Both branches trace m_activeSourceNodes; the only difference is
    // whether the graph lock can be taken yet.
    if (m_didInitializeContextGraphMutex) {
        AutoLocker lock(this);
        visitor->trace(m_activeSourceNodes);
    } else {
        visitor->trace(m_activeSourceNodes);
    }
    visitor->trace(m_resumeResolvers);
    visitor->trace(m_decodeAudioResolvers);

    visitor->trace(m_periodicWaveSine);
    visitor->trace(m_periodicWaveSquare);
    visitor->trace(m_periodicWaveSawtooth);
    visitor->trace(m_periodicWaveTriangle);
    RefCountedGarbageCollectedEventTargetWithInlineData<AbstractAudioContext>::trace(visitor);
    ActiveDOMObject::trace(visitor);
}
948
949 SecurityOrigin* AbstractAudioContext::getSecurityOrigin() const
950 {
951 if (getExecutionContext())
952 return getExecutionContext()->getSecurityOrigin();
953
954 return nullptr;
955 }
956
957 } // namespace blink
958
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698