Chromium Code Reviews
Side by Side Diff: third_party/WebKit/Source/modules/webaudio/AbstractAudioContext.h

Issue 1865583002: Implement BaseAudioContext (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 8 months ago
/*
 * Copyright (C) 2010, Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef AbstractAudioContext_h
#define AbstractAudioContext_h

#include "bindings/core/v8/ActiveScriptWrappable.h"
#include "bindings/core/v8/ScriptPromise.h"
#include "bindings/core/v8/ScriptPromiseResolver.h"
#include "core/dom/ActiveDOMObject.h"
#include "core/dom/DOMTypedArray.h"
#include "core/events/EventListener.h"
#include "modules/EventTargetModules.h"
#include "modules/ModulesExport.h"
#include "modules/webaudio/AsyncAudioDecoder.h"
#include "modules/webaudio/AudioDestinationNode.h"
#include "modules/webaudio/DeferredTaskHandler.h"
#include "modules/webaudio/IIRFilterNode.h"
#include "platform/audio/AudioBus.h"
#include "platform/heap/Handle.h"
#include "wtf/HashSet.h"
#include "wtf/RefPtr.h"
#include "wtf/Threading.h"
#include "wtf/Vector.h"
#include "wtf/build_config.h"

namespace blink {

class AnalyserNode;
class AudioBuffer;
class AudioBufferCallback;
class AudioBufferSourceNode;
class AudioListener;
class AudioSummingJunction;
class BiquadFilterNode;
class ChannelMergerNode;
class ChannelSplitterNode;
class ConvolverNode;
class DelayNode;
class Dictionary;
class Document;
class DynamicsCompressorNode;
class ExceptionState;
class GainNode;
class HTMLMediaElement;
class IIRFilterNode;
class MediaElementAudioSourceNode;
class MediaStreamAudioDestinationNode;
class MediaStreamAudioSourceNode;
class OscillatorNode;
class PannerNode;
class PeriodicWave;
class ScriptProcessorNode;
class ScriptPromiseResolver;
class ScriptState;
class SecurityOrigin;
class StereoPannerNode;
class WaveShaperNode;

// AbstractAudioContext is the cornerstone of the web audio API and all AudioNodes are created from it.
// For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism.

class MODULES_EXPORT AbstractAudioContext : public RefCountedGarbageCollectedEventTargetWithInlineData<AbstractAudioContext>, public ActiveScriptWrappable, public ActiveDOMObject {
    REFCOUNTED_GARBAGE_COLLECTED_EVENT_TARGET(AbstractAudioContext);
    USING_GARBAGE_COLLECTED_MIXIN(AbstractAudioContext);
    DEFINE_WRAPPERTYPEINFO();
public:
    // The state of an audio context. On creation, the state is Suspended. The state is Running if
    // audio is being processed (the audio graph is being pulled for data). The state is Closed if
    // the audio context has been closed. The valid transitions are from Suspended to either Running
    // or Closed, and from Running to Suspended or Closed. Once Closed, there are no valid transitions.
    enum AudioContextState {
        Suspended,
        Running,
        Closed
    };
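
    // A minimal sketch of the transition rules described above, for illustration
    // only. This helper is hypothetical; it is not declared by this class, and any
    // real checking would belong in the implementation of setContextState().
    //
    //   static bool isValidTransition(AudioContextState from, AudioContextState to)
    //   {
    //       if (from == Closed)
    //           return false; // Closed is terminal.
    //       if (from == Suspended)
    //           return to == Running || to == Closed;
    //       return to == Suspended || to == Closed; // from == Running.
    //   }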

    // Create an AudioContext for rendering to the audio hardware.
    static AbstractAudioContext* create(Document&, ExceptionState&);

    ~AbstractAudioContext() override;

    DECLARE_VIRTUAL_TRACE();

    // Is the destination node initialized and ready to handle audio?
    bool isDestinationInitialized() const
    {
        AudioDestinationNode* dest = destination();
        return dest ? dest->audioDestinationHandler().isInitialized() : false;
    }

    // Document notification
    void stop() final;
    bool hasPendingActivity() const final;

    AudioDestinationNode* destination() const { return m_destinationNode.get(); }

    size_t currentSampleFrame() const
    {
        return m_destinationNode ? m_destinationNode->audioDestinationHandler().currentSampleFrame() : 0;
    }

    double currentTime() const
    {
        return m_destinationNode ? m_destinationNode->audioDestinationHandler().currentTime() : 0;
    }

    float sampleRate() const { return m_destinationNode ? m_destinationNode->handler().sampleRate() : 0; }

    String state() const;
    AudioContextState contextState() const { return m_contextState; }

    AudioBuffer* createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&);

    // Asynchronous audio file data decoding.
    ScriptPromise decodeAudioData(ScriptState*, DOMArrayBuffer* audioData, AudioBufferCallback* successCallback, AudioBufferCallback* errorCallback, ExceptionState&);

    // Handles the promise and callbacks when |decodeAudioData| is finished decoding.
    void handleDecodeAudioData(AudioBuffer*, ScriptPromiseResolver*, AudioBufferCallback* successCallback, AudioBufferCallback* errorCallback);
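
    // For illustration, a rough sketch (an assumption, not the actual code) of how
    // handleDecodeAudioData could settle the promise and invoke the callbacks once
    // decoding has finished on the main thread:
    //
    //   if (audioBuffer) {
    //       resolver->resolve(audioBuffer);
    //       if (successCallback)
    //           successCallback->handleEvent(audioBuffer);
    //   } else {
    //       resolver->reject(DOMException::create(EncodingError, "Unable to decode audio data"));
    //       if (errorCallback)
    //           errorCallback->handleEvent(nullptr);
    //   }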

    AudioListener* listener() { return m_listener.get(); }

    virtual bool hasRealtimeConstraint() = 0;

    // The AudioNode create methods are called on the main thread (from JavaScript).
    AudioBufferSourceNode* createBufferSource(ExceptionState&);
    MediaElementAudioSourceNode* createMediaElementSource(HTMLMediaElement*, ExceptionState&);
    MediaStreamAudioSourceNode* createMediaStreamSource(MediaStream*, ExceptionState&);
    MediaStreamAudioDestinationNode* createMediaStreamDestination(ExceptionState&);
    GainNode* createGain(ExceptionState&);
    BiquadFilterNode* createBiquadFilter(ExceptionState&);
    WaveShaperNode* createWaveShaper(ExceptionState&);
    DelayNode* createDelay(ExceptionState&);
    DelayNode* createDelay(double maxDelayTime, ExceptionState&);
    PannerNode* createPanner(ExceptionState&);
    ConvolverNode* createConvolver(ExceptionState&);
    DynamicsCompressorNode* createDynamicsCompressor(ExceptionState&);
    AnalyserNode* createAnalyser(ExceptionState&);
    ScriptProcessorNode* createScriptProcessor(ExceptionState&);
    ScriptProcessorNode* createScriptProcessor(size_t bufferSize, ExceptionState&);
    ScriptProcessorNode* createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&);
    ScriptProcessorNode* createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&);
    StereoPannerNode* createStereoPanner(ExceptionState&);
    ChannelSplitterNode* createChannelSplitter(ExceptionState&);
    ChannelSplitterNode* createChannelSplitter(size_t numberOfOutputs, ExceptionState&);
    ChannelMergerNode* createChannelMerger(ExceptionState&);
    ChannelMergerNode* createChannelMerger(size_t numberOfInputs, ExceptionState&);
    OscillatorNode* createOscillator(ExceptionState&);
    PeriodicWave* createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, ExceptionState&);
    PeriodicWave* createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, const Dictionary&, ExceptionState&);

    // Close
    virtual ScriptPromise closeContext(ScriptState*) = 0;

    // Suspend
    virtual ScriptPromise suspendContext(ScriptState*) = 0;

    // Resume
    virtual ScriptPromise resumeContext(ScriptState*) = 0;

    // IIRFilter
    IIRFilterNode* createIIRFilter(Vector<double> feedforwardCoef, Vector<double> feedbackCoef,
        ExceptionState&);

    // When a source node has started processing and needs to be protected,
    // this method tells the context to protect the node.
    //
    // The context itself keeps a reference to all source nodes. The source
    // nodes then reference all nodes they're connected to. In turn, these
    // nodes reference all nodes they're connected to. All nodes are ultimately
    // connected to the AudioDestinationNode. When the context releases a source
    // node, it will be deactivated from the rendering graph along with all
    // other nodes it is uniquely connected to.
    void notifySourceNodeStartedProcessing(AudioNode*);
    // When a source node has no more processing to do (has finished playing),
    // this method tells the context to release the corresponding node.
    void notifySourceNodeFinishedProcessing(AudioHandler*);
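
    // A rough usage sketch with a hypothetical source node type (the names below
    // are illustrative and are not declared in this patch):
    //
    //   void HypotheticalSourceNode::start(double when)
    //   {
    //       // Main thread: keep this node (and everything it reaches) alive while it plays.
    //       context()->notifySourceNodeStartedProcessing(this);
    //   }
    //
    //   // Audio thread, from the node's AudioHandler once playback has finished:
    //   context()->notifySourceNodeFinishedProcessing(this);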

    // Called at the start of each render quantum.
    void handlePreRenderTasks();

    // Called at the end of each render quantum.
    void handlePostRenderTasks();

    // Called periodically at the end of each render quantum to release finished
    // source nodes.
    void releaseFinishedSourceNodes();

    // Keeps track of the number of connections made.
    void incrementConnectionCount()
    {
        ASSERT(isMainThread());
        m_connectionCount++;
    }

    unsigned connectionCount() const { return m_connectionCount; }

    DeferredTaskHandler& deferredTaskHandler() const { return *m_deferredTaskHandler; }
    //
    // Thread Safety and Graph Locking:
    //
    // The following functions call corresponding functions of
    // DeferredTaskHandler.
    bool isAudioThread() const { return deferredTaskHandler().isAudioThread(); }
    void lock() { deferredTaskHandler().lock(); }
    bool tryLock() { return deferredTaskHandler().tryLock(); }
    void unlock() { deferredTaskHandler().unlock(); }
#if ENABLE(ASSERT)
    // Returns true if this thread owns the context's lock.
    bool isGraphOwner() { return deferredTaskHandler().isGraphOwner(); }
#endif
    using AutoLocker = DeferredTaskHandler::AutoLocker;
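
    // Typical locking pattern, shown only as a sketch (assuming AutoLocker accepts
    // a context pointer): hold the graph lock for the duration of a scope when
    // touching the rendering graph from the main thread.
    //
    //   {
    //       AbstractAudioContext::AutoLocker locker(this);
    //       // ... mutate connections or other graph state safely here ...
    //   }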

    // Returns the maximum number of channels we can support.
    static unsigned maxNumberOfChannels() { return MaxNumberOfChannels; }

    // EventTarget
    const AtomicString& interfaceName() const final;
    ExecutionContext* getExecutionContext() const final;

    DEFINE_ATTRIBUTE_EVENT_LISTENER(statechange);

    void startRendering();
    void notifyStateChange();

    // A context is considered closed if:
    // - closeContext() has been called.
    // - it has been stopped by its execution context.
    virtual bool isContextClosed() const { return m_isCleared; }

    // Get the security origin for this audio context.
    SecurityOrigin* getSecurityOrigin() const;

    // Get the PeriodicWave for the specified oscillator type. The table is initialized internally
    // if necessary.
    PeriodicWave* periodicWave(int type);
protected:
    explicit AbstractAudioContext(Document*);
    AbstractAudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate);

    void initialize();
    void uninitialize();

    void setContextState(AudioContextState);

    virtual void didClose() {}

    // Tries to handle AudioBufferSourceNodes that were started but became disconnected or were never
    // connected. Because these never get pulled anymore, they will stay around forever. So if we
    // can, try to stop them so they can be collected.
    void handleStoppableSourceNodes();

    Member<AudioDestinationNode> m_destinationNode;

    // FIXME(dominicc): Move m_resumeResolvers to AudioContext, because only
    // it creates these Promises.
    // Vector of promises created by resume(). It takes time to handle them, so we collect all of
    // the promises here until they can be resolved or rejected.
    HeapVector<Member<ScriptPromiseResolver>> m_resumeResolvers;

    void setClosedContextSampleRate(float newSampleRate) { m_closedContextSampleRate = newSampleRate; }
    float closedContextSampleRate() const { return m_closedContextSampleRate; }
private:
    bool m_isCleared;
    void clear();

    void throwExceptionForClosedState(ExceptionState&);

    // When the context goes away, there might still be some sources which
    // haven't finished playing. Make sure to release them here.
    void releaseActiveSourceNodes();

    Member<AudioListener> m_listener;

    // Only accessed in the audio thread.
    // These raw pointers are safe because AudioSourceNodes in
    // m_activeSourceNodes own them.
    Vector<AudioHandler*> m_finishedSourceHandlers;

    // List of source nodes. This is either accessed when the graph lock is
    // held, or on the main thread when the audio thread has finished.
    // Oilpan: This Vector holds connection references. We must call
    // AudioHandler::makeConnection() when we add an AudioNode to this, and must
    // call AudioHandler::breakConnection() when we remove an AudioNode from
    // this.
    HeapVector<Member<AudioNode>> m_activeSourceNodes;

    // FIXME(dominicc): Move these to AudioContext because only
    // it creates these Promises.
    // Handle Promises for resume() and suspend().
    void resolvePromisesForResume();
    void resolvePromisesForResumeOnMainThread();

    void rejectPendingResolvers();

    // True if we're in the process of resolving promises for resume(). Resolving can take some
    // time, and the audio context process loop is very fast, so we don't want to call resolve an
    // excessive number of times.
    bool m_isResolvingResumePromises;

    unsigned m_connectionCount;

    // Graph locking.
    bool m_didInitializeContextGraphMutex;
    RefPtr<DeferredTaskHandler> m_deferredTaskHandler;

    // The state of the AbstractAudioContext.
    AudioContextState m_contextState;

    AsyncAudioDecoder m_audioDecoder;

    // When a context is closed, the sample rate is cleared. But decodeAudioData can be called
    // after the context has been closed and it needs the sample rate. When the context is closed,
    // the sample rate is saved here.
    float m_closedContextSampleRate;

    // Vector of promises created by decodeAudioData. This keeps the resolvers alive until
    // decodeAudioData finishes decoding and can tell the main thread to resolve them.
    HeapHashSet<Member<ScriptPromiseResolver>> m_decodeAudioResolvers;

    // PeriodicWaves for the built-in oscillator types. These only depend on the sample rate, so
    // they can be shared with all OscillatorNodes in the context. To conserve memory, these are
    // lazily initialized on first use.
    Member<PeriodicWave> m_periodicWaveSine;
    Member<PeriodicWave> m_periodicWaveSquare;
    Member<PeriodicWave> m_periodicWaveSawtooth;
    Member<PeriodicWave> m_periodicWaveTriangle;

    // 32 is considered large enough for multichannel audio. It is somewhat
    // arbitrary and could be increased if necessary.
    enum { MaxNumberOfChannels = 32 };
};

} // namespace blink

#endif // AbstractAudioContext_h