OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2010, Google Inc. All rights reserved. | 2 * Copyright (C) 2010, Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions | 5 * modification, are permitted provided that the following conditions |
6 * are met: | 6 * are met: |
7 * 1. Redistributions of source code must retain the above copyright | 7 * 1. Redistributions of source code must retain the above copyright |
8 * notice, this list of conditions and the following disclaimer. | 8 * notice, this list of conditions and the following disclaimer. |
9 * 2. Redistributions in binary form must reproduce the above copyright | 9 * 2. Redistributions in binary form must reproduce the above copyright |
10 * notice, this list of conditions and the following disclaimer in the | 10 * notice, this list of conditions and the following disclaimer in the |
11 * documentation and/or other materials provided with the distribution. | 11 * documentation and/or other materials provided with the distribution. |
12 * | 12 * |
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY | 13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY |
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED | 14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
15 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | 15 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE |
16 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY | 16 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY |
17 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES | 17 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES |
18 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; | 18 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; |
19 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON | 19 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON |
20 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 20 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
21 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS | 21 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS |
22 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 22 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
23 */ | 23 */ |
24 | 24 |
25 #ifndef AudioContext_h | 25 #ifndef AbstractAudioContext_h |
26 #define AudioContext_h | 26 #define AbstractAudioContext_h |
27 | 27 |
28 #include "bindings/core/v8/ScriptPromise.h" | 28 #include "bindings/core/v8/ScriptPromise.h" |
29 #include "bindings/core/v8/ScriptPromiseResolver.h" | 29 #include "bindings/core/v8/ScriptPromiseResolver.h" |
30 #include "core/dom/ActiveDOMObject.h" | 30 #include "core/dom/ActiveDOMObject.h" |
31 #include "core/dom/DOMTypedArray.h" | 31 #include "core/dom/DOMTypedArray.h" |
32 #include "core/events/EventListener.h" | 32 #include "core/events/EventListener.h" |
33 #include "modules/EventTargetModules.h" | 33 #include "modules/EventTargetModules.h" |
34 #include "modules/ModulesExport.h" | 34 #include "modules/ModulesExport.h" |
35 #include "modules/webaudio/AsyncAudioDecoder.h" | 35 #include "modules/webaudio/AsyncAudioDecoder.h" |
36 #include "modules/webaudio/AudioDestinationNode.h" | 36 #include "modules/webaudio/AudioDestinationNode.h" |
(...skipping 30 matching lines...)
67 class OscillatorNode; | 67 class OscillatorNode; |
68 class PannerNode; | 68 class PannerNode; |
69 class PeriodicWave; | 69 class PeriodicWave; |
70 class ScriptProcessorNode; | 70 class ScriptProcessorNode; |
71 class ScriptPromiseResolver; | 71 class ScriptPromiseResolver; |
72 class ScriptState; | 72 class ScriptState; |
73 class SecurityOrigin; | 73 class SecurityOrigin; |
74 class StereoPannerNode; | 74 class StereoPannerNode; |
75 class WaveShaperNode; | 75 class WaveShaperNode; |
76 | 76 |
77 // AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it. | 77 // AbstractAudioContext is the cornerstone of the web audio API and all AudioNodes are created from it. |
78 // For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism. | 78 // For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism. |
79 | 79 |
80 class MODULES_EXPORT AudioContext : public RefCountedGarbageCollectedEventTargetWithInlineData<AudioContext>, public ActiveDOMObject { | 80 class MODULES_EXPORT AbstractAudioContext : public RefCountedGarbageCollectedEventTargetWithInlineData<AbstractAudioContext>, public ActiveDOMObject { |
81 REFCOUNTED_GARBAGE_COLLECTED_EVENT_TARGET(AudioContext); | 81 REFCOUNTED_GARBAGE_COLLECTED_EVENT_TARGET(AbstractAudioContext); |
82 WILL_BE_USING_GARBAGE_COLLECTED_MIXIN(AudioContext); | 82 WILL_BE_USING_GARBAGE_COLLECTED_MIXIN(AbstractAudioContext); |
83 DEFINE_WRAPPERTYPEINFO(); | 83 DEFINE_WRAPPERTYPEINFO(); |
84 public: | 84 public: |
85 // The state of an audio context. On creation, the state is Suspended. The state is Running if | 85 // The state of an audio context. On creation, the state is Suspended. The state is Running if |
86 // audio is being processed (audio graph is being pulled for data). The state is Closed if the | 86 // audio is being processed (audio graph is being pulled for data). The state is Closed if the |
87 // audio context has been closed. The valid transitions are from Suspended to either Running or | 87 // audio context has been closed. The valid transitions are from Suspended to either Running or |
88 // Closed; Running to Suspended or Closed. Once Closed, there are no valid transitions. | 88 // Closed; Running to Suspended or Closed. Once Closed, there are no valid transitions. |
89 enum AudioContextState { | 89 enum AudioContextState { |
90 Suspended, | 90 Suspended, |
91 Running, | 91 Running, |
92 Closed | 92 Closed |
93 }; | 93 }; |
94 | 94 |
95 // Create an AudioContext for rendering to the audio hardware. | 95 // Create an AudioContext for rendering to the audio hardware. |
96 static AudioContext* create(Document&, ExceptionState&); | 96 static AbstractAudioContext* create(Document&, ExceptionState&); |
97 | 97 |
98 ~AudioContext() override; | 98 ~AbstractAudioContext() override; |
99 | 99 |
100 DECLARE_VIRTUAL_TRACE(); | 100 DECLARE_VIRTUAL_TRACE(); |
101 | 101 |
102 bool isInitialized() const { return m_isInitialized; } | 102 bool isInitialized() const { return m_isInitialized; } |
103 bool isOfflineContext() { return m_isOfflineContext; } | |
104 | 103 |
105 // Document notification | 104 // Document notification |
106 void stop() final; | 105 void stop() final; |
107 bool hasPendingActivity() const override; | 106 bool hasPendingActivity() const override; |
108 | 107 |
109 AudioDestinationNode* destination() { return m_destinationNode.get(); } | 108 AudioDestinationNode* destination() { return m_destinationNode.get(); } |
110 | 109 |
111 size_t currentSampleFrame() const | 110 size_t currentSampleFrame() const |
112 { | 111 { |
113 return m_destinationNode ? m_destinationNode->audioDestinationHandler().currentSampleFrame() : 0; | 112 return m_destinationNode ? m_destinationNode->audioDestinationHandler().currentSampleFrame() : 0; |
114 } | 113 } |
115 | 114 |
116 double currentTime() const | 115 double currentTime() const |
117 { | 116 { |
118 return m_destinationNode ? m_destinationNode->audioDestinationHandler().currentTime() : 0; | 117 return m_destinationNode ? m_destinationNode->audioDestinationHandler().currentTime() : 0; |
119 } | 118 } |
120 | 119 |
121 float sampleRate() const { return m_destinationNode ? m_destinationNode->handler().sampleRate() : 0; } | 120 float sampleRate() const { return m_destinationNode ? m_destinationNode->handler().sampleRate() : 0; } |
122 | 121 |
123 String state() const; | 122 String state() const; |
124 AudioContextState contextState() const { return m_contextState; } | 123 AudioContextState contextState() const { return m_contextState; } |
125 | 124 |
126 AudioBuffer* createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&); | 125 AudioBuffer* createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&); |
127 | 126 |
127 | 126 |
128 // Asynchronous audio file data decoding. | 127 // Asynchronous audio file data decoding. |
129 void decodeAudioData(DOMArrayBuffer*, AudioBufferCallback*, AudioBufferCallback*, ExceptionState&); | 128 void decodeAudioData(DOMArrayBuffer*, AudioBufferCallback*, AudioBufferCallback*, ExceptionState&); |
130 | 129 |
131 AudioListener* listener() { return m_listener.get(); } | 130 AudioListener* listener() { return m_listener.get(); } |
132 | 131 |
| 132 virtual bool hasRealtimeConstraint() = 0; |
| 133 |
133 // The AudioNode create methods are called on the main thread (from JavaScript). | 134 // The AudioNode create methods are called on the main thread (from JavaScript). |
134 AudioBufferSourceNode* createBufferSource(ExceptionState&); | 135 AudioBufferSourceNode* createBufferSource(ExceptionState&); |
135 MediaElementAudioSourceNode* createMediaElementSource(HTMLMediaElement*, ExceptionState&); | 136 MediaElementAudioSourceNode* createMediaElementSource(HTMLMediaElement*, ExceptionState&); |
136 MediaStreamAudioSourceNode* createMediaStreamSource(MediaStream*, ExceptionState&); | 137 MediaStreamAudioSourceNode* createMediaStreamSource(MediaStream*, ExceptionState&); |
137 MediaStreamAudioDestinationNode* createMediaStreamDestination(ExceptionState&); | 138 MediaStreamAudioDestinationNode* createMediaStreamDestination(ExceptionState&); |
138 GainNode* createGain(ExceptionState&); | 139 GainNode* createGain(ExceptionState&); |
139 BiquadFilterNode* createBiquadFilter(ExceptionState&); | 140 BiquadFilterNode* createBiquadFilter(ExceptionState&); |
140 WaveShaperNode* createWaveShaper(ExceptionState&); | 141 WaveShaperNode* createWaveShaper(ExceptionState&); |
141 DelayNode* createDelay(ExceptionState&); | 142 DelayNode* createDelay(ExceptionState&); |
142 DelayNode* createDelay(double maxDelayTime, ExceptionState&); | 143 DelayNode* createDelay(double maxDelayTime, ExceptionState&); |
143 PannerNode* createPanner(ExceptionState&); | 144 PannerNode* createPanner(ExceptionState&); |
144 ConvolverNode* createConvolver(ExceptionState&); | 145 ConvolverNode* createConvolver(ExceptionState&); |
145 DynamicsCompressorNode* createDynamicsCompressor(ExceptionState&); | 146 DynamicsCompressorNode* createDynamicsCompressor(ExceptionState&); |
146 AnalyserNode* createAnalyser(ExceptionState&); | 147 AnalyserNode* createAnalyser(ExceptionState&); |
147 ScriptProcessorNode* createScriptProcessor(ExceptionState&); | 148 ScriptProcessorNode* createScriptProcessor(ExceptionState&); |
148 ScriptProcessorNode* createScriptProcessor(size_t bufferSize, ExceptionState&); | 149 ScriptProcessorNode* createScriptProcessor(size_t bufferSize, ExceptionState&); |
149 ScriptProcessorNode* createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&); | 150 ScriptProcessorNode* createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&); |
150 ScriptProcessorNode* createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&); | 151 ScriptProcessorNode* createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&); |
151 StereoPannerNode* createStereoPanner(ExceptionState&); | 152 StereoPannerNode* createStereoPanner(ExceptionState&); |
152 ChannelSplitterNode* createChannelSplitter(ExceptionState&); | 153 ChannelSplitterNode* createChannelSplitter(ExceptionState&); |
153 ChannelSplitterNode* createChannelSplitter(size_t numberOfOutputs, ExceptionState&); | 154 ChannelSplitterNode* createChannelSplitter(size_t numberOfOutputs, ExceptionState&); |
154 ChannelMergerNode* createChannelMerger(ExceptionState&); | 155 ChannelMergerNode* createChannelMerger(ExceptionState&); |
155 ChannelMergerNode* createChannelMerger(size_t numberOfInputs, ExceptionState&); | 156 ChannelMergerNode* createChannelMerger(size_t numberOfInputs, ExceptionState&); |
156 OscillatorNode* createOscillator(ExceptionState&); | 157 OscillatorNode* createOscillator(ExceptionState&); |
157 PeriodicWave* createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, ExceptionState&); | 158 PeriodicWave* createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, ExceptionState&); |
158 | 159 |
159 // Close | 160 // Close |
160 ScriptPromise closeContext(ScriptState*); | 161 virtual ScriptPromise closeContext(ScriptState*) = 0; |
161 | 162 |
162 // Suspend/Resume | 163 // Suspend/Resume |
163 ScriptPromise suspendContext(ScriptState*); | 164 virtual ScriptPromise suspendContext(ScriptState*) = 0; |
164 ScriptPromise resumeContext(ScriptState*); | 165 virtual ScriptPromise resumeContext(ScriptState*) = 0; |
165 | 166 |
166 // When a source node has started processing and needs to be protected, | 167 // When a source node has started processing and needs to be protected, |
167 // this method tells the context to protect the node. | 168 // this method tells the context to protect the node. |
168 // | 169 // |
169 // The context itself keeps a reference to all source nodes. The source | 170 // The context itself keeps a reference to all source nodes. The source |
170 // nodes, then reference all nodes they're connected to. In turn, these | 171 // nodes, then reference all nodes they're connected to. In turn, these |
171 // nodes reference all nodes they're connected to. All nodes are ultimately | 172 // nodes reference all nodes they're connected to. All nodes are ultimately |
172 // connected to the AudioDestinationNode. When the context release a source | 173 // connected to the AudioDestinationNode. When the context release a source |
173 // node, it will be deactivated from the rendering graph along with all | 174 // node, it will be deactivated from the rendering graph along with all |
174 // other nodes it is uniquely connected to. | 175 // other nodes it is uniquely connected to. |
(...skipping 48 matching lines...)
223 DEFINE_ATTRIBUTE_EVENT_LISTENER(statechange); | 224 DEFINE_ATTRIBUTE_EVENT_LISTENER(statechange); |
224 | 225 |
225 void startRendering(); | 226 void startRendering(); |
226 void fireCompletionEvent(); | 227 void fireCompletionEvent(); |
227 void notifyStateChange(); | 228 void notifyStateChange(); |
228 | 229 |
229 // A context is considered closed if: | 230 // A context is considered closed if: |
230 // - closeContext() has been called, even if the audio HW has not yet been | 231 // - closeContext() has been called, even if the audio HW has not yet been |
231 // stopped. It will be stopped eventually. | 232 // stopped. It will be stopped eventually. |
232 // - it has been stopped (or is stopping) by its execution context. | 233 // - it has been stopped (or is stopping) by its execution context. |
233 bool isContextClosed() const { return m_closeResolver || m_isStopScheduled || m_isCleared; } | 234 virtual bool isContextClosed() const { return m_isStopScheduled || m_isCleared; } |
234 | |
235 static unsigned s_hardwareContextCount; | |
236 static unsigned s_contextId; | |
237 | 235 |
238 // Get the security origin for this audio context. | 236 // Get the security origin for this audio context. |
239 SecurityOrigin* securityOrigin() const; | 237 SecurityOrigin* securityOrigin() const; |
240 | 238 |
241 protected: | 239 protected: |
242 explicit AudioContext(Document*); | 240 explicit AbstractAudioContext(Document*); |
243 AudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate); | 241 AbstractAudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate); |
| 242 |
| 243 void setContextState(AudioContextState); |
| 244 virtual void didClose() {} |
| 245 void uninitialize(); |
244 | 246 |
245 RefPtrWillBeMember<ScriptPromiseResolver> m_offlineResolver; | 247 RefPtrWillBeMember<ScriptPromiseResolver> m_offlineResolver; |
| 248 |
| 249 // FIXME(dominicc): Move m_resumeResolvers to AudioContext, because only |
| 250 // it creates these Promises. |
| 251 // Vector of promises created by resume(). It takes time to handle them, so we collect all of |
| 252 // the promises here until they can be resolved or rejected. |
| 253 WillBeHeapVector<RefPtrWillBeMember<ScriptPromiseResolver>> m_resumeResolvers; |
246 private: | 254 private: |
247 void initialize(); | 255 void initialize(); |
248 void uninitialize(); | |
249 | 256 |
250 // ExecutionContext calls stop twice. | 257 // ExecutionContext calls stop twice. |
251 // We'd like to schedule only one stop action for them. | 258 // We'd like to schedule only one stop action for them. |
252 bool m_isStopScheduled; | 259 bool m_isStopScheduled; |
253 bool m_isCleared; | 260 bool m_isCleared; |
254 void clear(); | 261 void clear(); |
255 | 262 |
256 void throwExceptionForClosedState(ExceptionState&); | 263 void throwExceptionForClosedState(ExceptionState&); |
257 | 264 |
258 // Set to true when the destination node has been initialized and is ready to process data. | 265 // Set to true when the destination node has been initialized and is ready to process data. |
(...skipping 12 matching lines...)
271 Vector<AudioHandler*> m_finishedSourceHandlers; | 278 Vector<AudioHandler*> m_finishedSourceHandlers; |
272 | 279 |
273 // List of source nodes. This is either accessed when the graph lock is | 280 // List of source nodes. This is either accessed when the graph lock is |
274 // held, or on the main thread when the audio thread has finished. | 281 // held, or on the main thread when the audio thread has finished. |
275 // Oilpan: This Vector holds connection references. We must call | 282 // Oilpan: This Vector holds connection references. We must call |
276 // AudioHandler::makeConnection when we add an AudioNode to this, and must | 283 // AudioHandler::makeConnection when we add an AudioNode to this, and must |
277 // call AudioHandler::breakConnection() when we remove an AudioNode from | 284 // call AudioHandler::breakConnection() when we remove an AudioNode from |
278 // this. | 285 // this. |
279 HeapVector<Member<AudioNode>> m_activeSourceNodes; | 286 HeapVector<Member<AudioNode>> m_activeSourceNodes; |
280 | 287 |
281 // Stop rendering the audio graph. | 288 // FIXME(dominicc): Move these to AudioContext because only |
282 void stopRendering(); | 289 // it creates these Promises. |
283 | |
284 // Handle Promises for resume() and suspend() | 290 // Handle Promises for resume() and suspend() |
285 void resolvePromisesForResume(); | 291 void resolvePromisesForResume(); |
286 void resolvePromisesForResumeOnMainThread(); | 292 void resolvePromisesForResumeOnMainThread(); |
287 | 293 |
288 // Vector of promises created by resume(). It takes time to handle them, so we collect all of | |
289 // the promises here until they can be resolved or rejected. | |
290 WillBeHeapVector<RefPtrWillBeMember<ScriptPromiseResolver>> m_resumeResolvers; | |
291 void rejectPendingResolvers(); | 294 void rejectPendingResolvers(); |
292 | 295 |
293 // True if we're in the process of resolving promises for resume(). Resolving can take some | 296 // True if we're in the process of resolving promises for resume(). Resolving can take some |
294 // time and the audio context process loop is very fast, so we don't want to call resolve an | 297 // time and the audio context process loop is very fast, so we don't want to call resolve an |
295 // excessive number of times. | 298 // excessive number of times. |
296 bool m_isResolvingResumePromises; | 299 bool m_isResolvingResumePromises; |
297 | 300 |
298 unsigned m_connectionCount; | 301 unsigned m_connectionCount; |
299 | 302 |
300 // Graph locking. | 303 // Graph locking. |
301 bool m_didInitializeContextGraphMutex; | 304 bool m_didInitializeContextGraphMutex; |
302 RefPtr<DeferredTaskHandler> m_deferredTaskHandler; | 305 RefPtr<DeferredTaskHandler> m_deferredTaskHandler; |
303 | 306 |
304 Member<AudioBuffer> m_renderTarget; | 307 Member<AudioBuffer> m_renderTarget; |
305 | 308 |
306 bool m_isOfflineContext; | 309 // The state of the AbstractAudioContext. |
307 | |
308 // The state of the AudioContext. | |
309 AudioContextState m_contextState; | 310 AudioContextState m_contextState; |
310 void setContextState(AudioContextState); | |
311 | 311 |
312 AsyncAudioDecoder m_audioDecoder; | 312 AsyncAudioDecoder m_audioDecoder; |
313 | 313 |
314 // The Promise that is returned by close(); | |
315 RefPtrWillBeMember<ScriptPromiseResolver> m_closeResolver; | |
316 | |
317 // Tries to handle AudioBufferSourceNodes that were started but became disconnected or was never | 314 // Tries to handle AudioBufferSourceNodes that were started but became disconnected or was never |
318 // connected. Because these never get pulled anymore, they will stay around forever. So if we | 315 // connected. Because these never get pulled anymore, they will stay around forever. So if we |
319 // can, try to stop them so they can be collected. | 316 // can, try to stop them so they can be collected. |
320 void handleStoppableSourceNodes(); | 317 void handleStoppableSourceNodes(); |
321 | 318 |
322 // This is considering 32 is large enough for multiple channels audio. | 319 // This is considering 32 is large enough for multiple channels audio. |
323 // It is somewhat arbitrary and could be increased if necessary. | 320 // It is somewhat arbitrary and could be increased if necessary. |
324 enum { MaxNumberOfChannels = 32 }; | 321 enum { MaxNumberOfChannels = 32 }; |
325 | |
326 unsigned m_contextId; | |
327 }; | 322 }; |
328 | 323 |
329 } // namespace blink | 324 } // namespace blink |
330 | 325 |
331 #endif // AudioContext_h | 326 #endif // AbstractAudioContext_h |
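
The comment at new lines 85-88 spells out the allowed AudioContextState transitions (Suspended may go to Running or Closed, Running may go to Suspended or Closed, and Closed is terminal). The sketch below restates that rule as a standalone C++ predicate; it is a minimal reading aid, and the helper name isValidTransition is hypothetical and does not appear in this patch or in the Chromium source.

    // Reading aid only: the transition rules described in the header comment,
    // expressed as a standalone predicate.
    enum AudioContextState { Suspended, Running, Closed };

    bool isValidTransition(AudioContextState from, AudioContextState to)
    {
        switch (from) {
        case Suspended:
            return to == Running || to == Closed; // Suspended -> Running or Closed
        case Running:
            return to == Suspended || to == Closed; // Running -> Suspended or Closed
        case Closed:
            return false; // Closed is terminal; no further transitions
        }
        return false;
    }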