OLD | NEW |
| (Empty) |
1 dart_library.library('dart/web_audio', null, /* Imports */[ | |
2 'dart/_runtime', | |
3 'dart/core', | |
4 'dart/html', | |
5 'dart/_metadata', | |
6 'dart/_js_helper', | |
7 'dart/typed_data', | |
8 'dart/_interceptors', | |
9 'dart/async' | |
10 ], /* Lazy imports */[ | |
11 ], function(exports, dart, core, html, _metadata, _js_helper, typed_data, _interceptors, async) { | |
12 'use strict'; | |
13 let dartx = dart.dartx; | |
14 const _connect = Symbol('_connect'); | |
15 dart.defineExtensionNames([ | |
16 'disconnect', | |
17 'connectNode', | |
18 'connectParam', | |
19 'channelCount', | |
20 'channelCountMode', | |
21 'channelInterpretation', | |
22 'context', | |
23 'numberOfInputs', | |
24 'numberOfOutputs' | |
25 ]); | |
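// Note (added annotation, inferred from the code below): this appears to be dev_compiler output for dart:web_audio; | |
// AudioNode wraps the native Web Audio AudioNode interface and forwards its channel properties and | |
// connect/disconnect calls through dartx.* extension symbols so Dart code can use the browser object directly. | |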
26 class AudioNode extends html.EventTarget { | |
27 static _() { | |
28 dart.throw(new core.UnsupportedError("Not supported")); | |
29 } | |
30 get [dartx.channelCount]() { | |
31 return this.channelCount; | |
32 } | |
33 set [dartx.channelCount](value) { | |
34 this.channelCount = value; | |
35 } | |
36 get [dartx.channelCountMode]() { | |
37 return this.channelCountMode; | |
38 } | |
39 set [dartx.channelCountMode](value) { | |
40 this.channelCountMode = value; | |
41 } | |
42 get [dartx.channelInterpretation]() { | |
43 return this.channelInterpretation; | |
44 } | |
45 set [dartx.channelInterpretation](value) { | |
46 this.channelInterpretation = value; | |
47 } | |
48 get [dartx.context]() { | |
49 return this.context; | |
50 } | |
51 get [dartx.numberOfInputs]() { | |
52 return this.numberOfInputs; | |
53 } | |
54 get [dartx.numberOfOutputs]() { | |
55 return this.numberOfOutputs; | |
56 } | |
57 [_connect](destination, output, input) { | |
58 return this.connect(destination, output, input); | |
59 } | |
60 [dartx.disconnect](output) { | |
61 return this.disconnect(output); | |
62 } | |
63 [dartx.connectNode](destination, output, input) { | |
64 if (output === void 0) output = 0; | |
65 if (input === void 0) input = 0; | |
66 return this[_connect](destination, output, input); | |
67 } | |
68 [dartx.connectParam](destination, output) { | |
69 if (output === void 0) output = 0; | |
70 return this[_connect](destination, output); | |
71 } | |
72 } | |
73 dart.setSignature(AudioNode, { | |
74 constructors: () => ({_: [AudioNode, []]}), | |
75 methods: () => ({ | |
76 [_connect]: [dart.void, [dart.dynamic, core.int], [core.int]], | |
77 [dartx.disconnect]: [dart.void, [core.int]], | |
78 [dartx.connectNode]: [dart.void, [AudioNode], [core.int, core.int]], | |
79 [dartx.connectParam]: [dart.void, [AudioParam], [core.int]] | |
80 }) | |
81 }); | |
82 AudioNode[dart.metadata] = () => [dart.const(new _metadata.DomName('AudioNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("AudioNode"))]; | |
83 dart.registerExtension(dart.global.AudioNode, AudioNode); | |
84 dart.defineExtensionNames([ | |
85 'getByteFrequencyData', | |
86 'getByteTimeDomainData', | |
87 'getFloatFrequencyData', | |
88 'getFloatTimeDomainData', | |
89 'fftSize', | |
90 'frequencyBinCount', | |
91 'maxDecibels', | |
92 'minDecibels', | |
93 'smoothingTimeConstant' | |
94 ]); | |
95 class AnalyserNode extends AudioNode { | |
96 static _() { | |
97 dart.throw(new core.UnsupportedError("Not supported")); | |
98 } | |
99 get [dartx.fftSize]() { | |
100 return this.fftSize; | |
101 } | |
102 set [dartx.fftSize](value) { | |
103 this.fftSize = value; | |
104 } | |
105 get [dartx.frequencyBinCount]() { | |
106 return this.frequencyBinCount; | |
107 } | |
108 get [dartx.maxDecibels]() { | |
109 return this.maxDecibels; | |
110 } | |
111 set [dartx.maxDecibels](value) { | |
112 this.maxDecibels = value; | |
113 } | |
114 get [dartx.minDecibels]() { | |
115 return this.minDecibels; | |
116 } | |
117 set [dartx.minDecibels](value) { | |
118 this.minDecibels = value; | |
119 } | |
120 get [dartx.smoothingTimeConstant]() { | |
121 return this.smoothingTimeConstant; | |
122 } | |
123 set [dartx.smoothingTimeConstant](value) { | |
124 this.smoothingTimeConstant = value; | |
125 } | |
126 [dartx.getByteFrequencyData](array) { | |
127 return this.getByteFrequencyData(array); | |
128 } | |
129 [dartx.getByteTimeDomainData](array) { | |
130 return this.getByteTimeDomainData(array); | |
131 } | |
132 [dartx.getFloatFrequencyData](array) { | |
133 return this.getFloatFrequencyData(array); | |
134 } | |
135 [dartx.getFloatTimeDomainData](array) { | |
136 return this.getFloatTimeDomainData(array); | |
137 } | |
138 } | |
139 dart.setSignature(AnalyserNode, { | |
140 constructors: () => ({_: [AnalyserNode, []]}), | |
141 methods: () => ({ | |
142 [dartx.getByteFrequencyData]: [dart.void, [typed_data.Uint8List]], | |
143 [dartx.getByteTimeDomainData]: [dart.void, [typed_data.Uint8List]], | |
144 [dartx.getFloatFrequencyData]: [dart.void, [typed_data.Float32List]], | |
145 [dartx.getFloatTimeDomainData]: [dart.void, [typed_data.Float32List]] | |
146 }) | |
147 }); | |
148 AnalyserNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('AnalyserNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("AnalyserNode,RealtimeAnalyserNode"))]; | |
149 dart.registerExtension(dart.global.AnalyserNode, AnalyserNode); | |
150 dart.defineExtensionNames([ | |
151 'getChannelData', | |
152 'duration', | |
153 'length', | |
154 'numberOfChannels', | |
155 'sampleRate' | |
156 ]); | |
157 class AudioBuffer extends _interceptors.Interceptor { | |
158 static _() { | |
159 dart.throw(new core.UnsupportedError("Not supported")); | |
160 } | |
161 get [dartx.duration]() { | |
162 return this.duration; | |
163 } | |
164 get [dartx.length]() { | |
165 return this.length; | |
166 } | |
167 get [dartx.numberOfChannels]() { | |
168 return this.numberOfChannels; | |
169 } | |
170 get [dartx.sampleRate]() { | |
171 return this.sampleRate; | |
172 } | |
173 [dartx.getChannelData](channelIndex) { | |
174 return this.getChannelData(channelIndex); | |
175 } | |
176 } | |
177 dart.setSignature(AudioBuffer, { | |
178 constructors: () => ({_: [AudioBuffer, []]}), | |
179 methods: () => ({[dartx.getChannelData]: [typed_data.Float32List, [core.int]]}) | |
180 }); | |
181 AudioBuffer[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('AudioBuffer')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("AudioBuffer"))]; | |
182 dart.registerExtension(dart.global.AudioBuffer, AudioBuffer); | |
183 const AudioBufferCallback = dart.typedef('AudioBufferCallback', () => dart.functionType(dart.void, [AudioBuffer])); | |
184 class AudioSourceNode extends AudioNode { | |
185 static _() { | |
186 dart.throw(new core.UnsupportedError("Not supported")); | |
187 } | |
188 } | |
189 dart.setSignature(AudioSourceNode, { | |
190 constructors: () => ({_: [AudioSourceNode, []]}) | |
191 }); | |
192 AudioSourceNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('AudioSourceNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("AudioSourceNode"))]; | |
193 dart.registerExtension(dart.global.AudioSourceNode, AudioSourceNode); | |
194 dart.defineExtensionNames([ | |
195 'start', | |
196 'stop', | |
197 'onEnded', | |
198 'buffer', | |
199 'loop', | |
200 'loopEnd', | |
201 'loopStart', | |
202 'playbackRate' | |
203 ]); | |
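// Note (added annotation): the start()/stop() shims below appear to prefer the standard method names and | |
// fall back to the legacy noteOn()/noteOff() methods when start/stop are not present on older engines. | |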
204 class AudioBufferSourceNode extends AudioSourceNode { | |
205 [dartx.start](when, grainOffset, grainDuration) { | |
206 if (grainOffset === void 0) grainOffset = null; | |
207 if (grainDuration === void 0) grainDuration = null; | |
208 if (!!this.start) { | |
209 if (grainDuration != null) { | |
210 this.start(when, grainOffset, grainDuration); | |
211 } else if (grainOffset != null) { | |
212 this.start(when, grainOffset); | |
213 } else { | |
214 this.start(when); | |
215 } | |
216 } else { | |
217 if (grainDuration != null) { | |
218 this.noteOn(when, grainOffset, grainDuration); | |
219 } else if (grainOffset != null) { | |
220 this.noteOn(when, grainOffset); | |
221 } else { | |
222 this.noteOn(when); | |
223 } | |
224 } | |
225 } | |
226 [dartx.stop](when) { | |
227 if (!!this.stop) { | |
228 this.stop(when); | |
229 } else { | |
230 this.noteOff(when); | |
231 } | |
232 } | |
233 static _() { | |
234 dart.throw(new core.UnsupportedError("Not supported")); | |
235 } | |
236 get [dartx.buffer]() { | |
237 return this.buffer; | |
238 } | |
239 set [dartx.buffer](value) { | |
240 this.buffer = value; | |
241 } | |
242 get [dartx.loop]() { | |
243 return this.loop; | |
244 } | |
245 set [dartx.loop](value) { | |
246 this.loop = value; | |
247 } | |
248 get [dartx.loopEnd]() { | |
249 return this.loopEnd; | |
250 } | |
251 set [dartx.loopEnd](value) { | |
252 this.loopEnd = value; | |
253 } | |
254 get [dartx.loopStart]() { | |
255 return this.loopStart; | |
256 } | |
257 set [dartx.loopStart](value) { | |
258 this.loopStart = value; | |
259 } | |
260 get [dartx.playbackRate]() { | |
261 return this.playbackRate; | |
262 } | |
263 get [dartx.onEnded]() { | |
264 return AudioBufferSourceNode.endedEvent.forTarget(this); | |
265 } | |
266 } | |
267 dart.setSignature(AudioBufferSourceNode, { | |
268 constructors: () => ({_: [AudioBufferSourceNode, []]}), | |
269 methods: () => ({ | |
270 [dartx.start]: [dart.void, [core.num], [core.num, core.num]], | |
271 [dartx.stop]: [dart.void, [core.num]] | |
272 }) | |
273 }); | |
274 AudioBufferSourceNode[dart.metadata] = () => [dart.const(new _metadata.DomName('AudioBufferSourceNode')), dart.const(new _metadata.SupportedBrowser(_metadata.SupportedBrowser.CHROME)), dart.const(new _metadata.SupportedBrowser(_metadata.SupportedBrowser.FIREFOX)), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("AudioBufferSourceNode"))]; | |
275 AudioBufferSourceNode.endedEvent = dart.const(new (html.EventStreamProvider$(html.Event))('ended')); | |
276 dart.registerExtension(dart.global.AudioBufferSourceNode, AudioBufferSourceNode); | |
277 const _decodeAudioData = Symbol('_decodeAudioData'); | |
278 dart.defineExtensionNames([ | |
279 'createAnalyser', | |
280 'createBiquadFilter', | |
281 'createBuffer', | |
282 'createBufferSource', | |
283 'createChannelMerger', | |
284 'createChannelSplitter', | |
285 'createConvolver', | |
286 'createDelay', | |
287 'createDynamicsCompressor', | |
288 'createMediaElementSource', | |
289 'createMediaStreamDestination', | |
290 'createMediaStreamSource', | |
291 'createOscillator', | |
292 'createPanner', | |
293 'createPeriodicWave', | |
294 'createWaveShaper', | |
295 'startRendering', | |
296 'onComplete', | |
297 'createGain', | |
298 'createScriptProcessor', | |
299 'decodeAudioData', | |
300 'currentTime', | |
301 'destination', | |
302 'listener', | |
303 'sampleRate' | |
304 ]); | |
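// Note (added annotation): AudioContext.supported and the factory constructor below probe both the standard | |
// AudioContext constructor and the prefixed webkitAudioContext used by older WebKit-based browsers. | |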
305 class AudioContext extends html.EventTarget { | |
306 static _() { | |
307 dart.throw(new core.UnsupportedError("Not supported")); | |
308 } | |
309 static get supported() { | |
310 return !!(window.AudioContext || window.webkitAudioContext); | |
311 } | |
312 get [dartx.currentTime]() { | |
313 return this.currentTime; | |
314 } | |
315 get [dartx.destination]() { | |
316 return this.destination; | |
317 } | |
318 get [dartx.listener]() { | |
319 return this.listener; | |
320 } | |
321 get [dartx.sampleRate]() { | |
322 return this.sampleRate; | |
323 } | |
324 [dartx.createAnalyser]() { | |
325 return this.createAnalyser(); | |
326 } | |
327 [dartx.createBiquadFilter]() { | |
328 return this.createBiquadFilter(); | |
329 } | |
330 [dartx.createBuffer](numberOfChannels, numberOfFrames, sampleRate) { | |
331 return this.createBuffer(numberOfChannels, numberOfFrames, sampleRate); | |
332 } | |
333 [dartx.createBufferSource]() { | |
334 return this.createBufferSource(); | |
335 } | |
336 [dartx.createChannelMerger](numberOfInputs) { | |
337 return this.createChannelMerger(numberOfInputs); | |
338 } | |
339 [dartx.createChannelSplitter](numberOfOutputs) { | |
340 return this.createChannelSplitter(numberOfOutputs); | |
341 } | |
342 [dartx.createConvolver]() { | |
343 return this.createConvolver(); | |
344 } | |
345 [dartx.createDelay](maxDelayTime) { | |
346 return this.createDelay(maxDelayTime); | |
347 } | |
348 [dartx.createDynamicsCompressor]() { | |
349 return this.createDynamicsCompressor(); | |
350 } | |
351 [dartx.createMediaElementSource](mediaElement) { | |
352 return this.createMediaElementSource(mediaElement); | |
353 } | |
354 [dartx.createMediaStreamDestination]() { | |
355 return this.createMediaStreamDestination(); | |
356 } | |
357 [dartx.createMediaStreamSource](mediaStream) { | |
358 return this.createMediaStreamSource(mediaStream); | |
359 } | |
360 [dartx.createOscillator]() { | |
361 return this.createOscillator(); | |
362 } | |
363 [dartx.createPanner]() { | |
364 return this.createPanner(); | |
365 } | |
366 [dartx.createPeriodicWave](real, imag) { | |
367 return this.createPeriodicWave(real, imag); | |
368 } | |
369 [dartx.createWaveShaper]() { | |
370 return this.createWaveShaper(); | |
371 } | |
372 [_decodeAudioData](audioData, successCallback, errorCallback) { | |
373 return this.decodeAudioData(audioData, successCallback, errorCallback); | |
374 } | |
375 [dartx.startRendering]() { | |
376 return this.startRendering(); | |
377 } | |
378 get [dartx.onComplete]() { | |
379 return AudioContext.completeEvent.forTarget(this); | |
380 } | |
381 static new() { | |
382 return dart.as(new (window.AudioContext || window.webkitAudioContext)(), AudioContext); | |
383 } | |
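// Note (added annotation): createGain appears to use the standard method when defined and otherwise | |
// falls back to the legacy createGainNode name. | |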
384 [dartx.createGain]() { | |
385 if (this.createGain !== undefined) { | |
386 return dart.as(this.createGain(), GainNode); | |
387 } else { | |
388 return dart.as(this.createGainNode(), GainNode); | |
389 } | |
390 } | |
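// Note (added annotation): createScriptProcessor picks whichever of createScriptProcessor or the legacy | |
// createJavaScriptNode exists, then calls it with only the arguments that were supplied. | |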
391 [dartx.createScriptProcessor](bufferSize, numberOfInputChannels, numberOfOutputChannels) { | |
392 if (numberOfInputChannels === void 0) numberOfInputChannels = null; | |
393 if (numberOfOutputChannels === void 0) numberOfOutputChannels = null; | |
394 let func = this.createScriptProcessor || this.createJavaScriptNode; | |
395 if (numberOfOutputChannels != null) { | |
396 return dart.as(func.call(this, bufferSize, numberOfInputChannels, numberOfOutputChannels), ScriptProcessorNode); | |
397 } else if (numberOfInputChannels != null) { | |
398 return dart.as(func.call(this, bufferSize, numberOfInputChannels), ScriptProcessorNode); | |
399 } else { | |
400 return dart.as(func.call(this, bufferSize), ScriptProcessorNode); | |
401 } | |
402 } | |
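// Note (added annotation): decodeAudioData wraps the callback-based native API in a Completer so Dart | |
// callers get a Future of AudioBuffer; a null error argument is reported as an empty-string error. | |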
403 [dartx.decodeAudioData](audioData) { | |
404 let completer = async.Completer$(AudioBuffer).new(); | |
405 this[_decodeAudioData](audioData, dart.fn(value => { | |
406 completer.complete(value); | |
407 }, dart.void, [AudioBuffer]), dart.fn(error => { | |
408 if (error == null) { | |
409 completer.completeError(''); | |
410 } else { | |
411 completer.completeError(error); | |
412 } | |
413 }, dart.void, [AudioBuffer])); | |
414 return completer.future; | |
415 } | |
416 } | |
417 dart.setSignature(AudioContext, { | |
418 constructors: () => ({ | |
419 _: [AudioContext, []], | |
420 new: [AudioContext, []] | |
421 }), | |
422 methods: () => ({ | |
423 [dartx.createAnalyser]: [AnalyserNode, []], | |
424 [dartx.createBiquadFilter]: [BiquadFilterNode, []], | |
425 [dartx.createBuffer]: [AudioBuffer, [core.int, core.int, core.num]], | |
426 [dartx.createBufferSource]: [AudioBufferSourceNode, []], | |
427 [dartx.createChannelMerger]: [ChannelMergerNode, [], [core.int]], | |
428 [dartx.createChannelSplitter]: [ChannelSplitterNode, [], [core.int]], | |
429 [dartx.createConvolver]: [ConvolverNode, []], | |
430 [dartx.createDelay]: [DelayNode, [], [core.num]], | |
431 [dartx.createDynamicsCompressor]: [DynamicsCompressorNode, []], | |
432 [dartx.createMediaElementSource]: [MediaElementAudioSourceNode, [html.MediaElement]], | |
433 [dartx.createMediaStreamDestination]: [MediaStreamAudioDestinationNode, []], | |
434 [dartx.createMediaStreamSource]: [MediaStreamAudioSourceNode, [html.MediaStream]], | |
435 [dartx.createOscillator]: [OscillatorNode, []], | |
436 [dartx.createPanner]: [PannerNode, []], | |
437 [dartx.createPeriodicWave]: [PeriodicWave, [typed_data.Float32List, typed_data.Float32List]], | |
438 [dartx.createWaveShaper]: [WaveShaperNode, []], | |
439 [_decodeAudioData]: [dart.void, [typed_data.ByteBuffer, AudioBufferCallback], [AudioBufferCallback]], | |
440 [dartx.startRendering]: [dart.void, []], | |
441 [dartx.createGain]: [GainNode, []], | |
442 [dartx.createScriptProcessor]: [ScriptProcessorNode, [core.int], [core.int, core.int]], | |
443 [dartx.decodeAudioData]: [async.Future$(AudioBuffer), [typed_data.ByteBuffer]] | |
444 }) | |
445 }); | |
446 AudioContext[dart.metadata] = () => [dart.const(new _metadata.DomName('AudioContext')), dart.const(new _metadata.SupportedBrowser(_metadata.SupportedBrowser.CHROME)), dart.const(new _metadata.SupportedBrowser(_metadata.SupportedBrowser.FIREFOX)), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("AudioContext,webkitAudioContext"))]; | |
447 AudioContext.completeEvent = dart.const(new (html.EventStreamProvider$(html.Event))('complete')); | |
448 dart.registerExtension(dart.global.AudioContext, AudioContext); | |
449 dart.defineExtensionNames([ | |
450 'maxChannelCount' | |
451 ]); | |
452 class AudioDestinationNode extends AudioNode { | |
453 static _() { | |
454 dart.throw(new core.UnsupportedError("Not supported")); | |
455 } | |
456 get [dartx.maxChannelCount]() { | |
457 return this.maxChannelCount; | |
458 } | |
459 } | |
460 dart.setSignature(AudioDestinationNode, { | |
461 constructors: () => ({_: [AudioDestinationNode, []]}) | |
462 }); | |
463 AudioDestinationNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('AudioDestinationNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("AudioDestinationNode"))]; | |
464 dart.registerExtension(dart.global.AudioDestinationNode, AudioDestinationNode); | |
465 dart.defineExtensionNames([ | |
466 'setOrientation', | |
467 'setPosition', | |
468 'setVelocity', | |
469 'dopplerFactor', | |
470 'speedOfSound' | |
471 ]); | |
472 class AudioListener extends _interceptors.Interceptor { | |
473 static _() { | |
474 dart.throw(new core.UnsupportedError("Not supported")); | |
475 } | |
476 get [dartx.dopplerFactor]() { | |
477 return this.dopplerFactor; | |
478 } | |
479 set [dartx.dopplerFactor](value) { | |
480 this.dopplerFactor = value; | |
481 } | |
482 get [dartx.speedOfSound]() { | |
483 return this.speedOfSound; | |
484 } | |
485 set [dartx.speedOfSound](value) { | |
486 this.speedOfSound = value; | |
487 } | |
488 [dartx.setOrientation](x, y, z, xUp, yUp, zUp) { | |
489 return this.setOrientation(x, y, z, xUp, yUp, zUp); | |
490 } | |
491 [dartx.setPosition](x, y, z) { | |
492 return this.setPosition(x, y, z); | |
493 } | |
494 [dartx.setVelocity](x, y, z) { | |
495 return this.setVelocity(x, y, z); | |
496 } | |
497 } | |
498 dart.setSignature(AudioListener, { | |
499 constructors: () => ({_: [AudioListener, []]}), | |
500 methods: () => ({ | |
501 [dartx.setOrientation]: [dart.void, [core.num, core.num, core.num, core.num, core.num, core.num]], | |
502 [dartx.setPosition]: [dart.void, [core.num, core.num, core.num]], | |
503 [dartx.setVelocity]: [dart.void, [core.num, core.num, core.num]] | |
504 }) | |
505 }); | |
506 AudioListener[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('AudioListener')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("AudioListener"))]; | |
507 dart.registerExtension(dart.global.AudioListener, AudioListener); | |
508 dart.defineExtensionNames([ | |
509 'cancelScheduledValues', | |
510 'exponentialRampToValueAtTime', | |
511 'linearRampToValueAtTime', | |
512 'setTargetAtTime', | |
513 'setValueAtTime', | |
514 'setValueCurveAtTime', | |
515 'defaultValue', | |
516 'value' | |
517 ]); | |
518 class AudioParam extends _interceptors.Interceptor { | |
519 static _() { | |
520 dart.throw(new core.UnsupportedError("Not supported")); | |
521 } | |
522 get [dartx.defaultValue]() { | |
523 return this.defaultValue; | |
524 } | |
525 get [dartx.value]() { | |
526 return this.value; | |
527 } | |
528 set [dartx.value](value) { | |
529 this.value = value; | |
530 } | |
531 [dartx.cancelScheduledValues](startTime) { | |
532 return this.cancelScheduledValues(startTime); | |
533 } | |
534 [dartx.exponentialRampToValueAtTime](value, time) { | |
535 return this.exponentialRampToValueAtTime(value, time); | |
536 } | |
537 [dartx.linearRampToValueAtTime](value, time) { | |
538 return this.linearRampToValueAtTime(value, time); | |
539 } | |
540 [dartx.setTargetAtTime](target, time, timeConstant) { | |
541 return this.setTargetAtTime(target, time, timeConstant); | |
542 } | |
543 [dartx.setValueAtTime](value, time) { | |
544 return this.setValueAtTime(value, time); | |
545 } | |
546 [dartx.setValueCurveAtTime](values, time, duration) { | |
547 return this.setValueCurveAtTime(values, time, duration); | |
548 } | |
549 } | |
550 dart.setSignature(AudioParam, { | |
551 constructors: () => ({_: [AudioParam, []]}), | |
552 methods: () => ({ | |
553 [dartx.cancelScheduledValues]: [dart.void, [core.num]], | |
554 [dartx.exponentialRampToValueAtTime]: [dart.void, [core.num, core.num]], | |
555 [dartx.linearRampToValueAtTime]: [dart.void, [core.num, core.num]], | |
556 [dartx.setTargetAtTime]: [dart.void, [core.num, core.num, core.num]], | |
557 [dartx.setValueAtTime]: [dart.void, [core.num, core.num]], | |
558 [dartx.setValueCurveAtTime]: [dart.void, [typed_data.Float32List, core.num, core.num]] | |
559 }) | |
560 }); | |
561 AudioParam[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('AudioParam')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("AudioParam"))]; | |
562 dart.registerExtension(dart.global.AudioParam, AudioParam); | |
563 dart.defineExtensionNames([ | |
564 'inputBuffer', | |
565 'outputBuffer', | |
566 'playbackTime' | |
567 ]); | |
568 class AudioProcessingEvent extends html.Event { | |
569 static _() { | |
570 dart.throw(new core.UnsupportedError("Not supported")); | |
571 } | |
572 get [dartx.inputBuffer]() { | |
573 return this.inputBuffer; | |
574 } | |
575 get [dartx.outputBuffer]() { | |
576 return this.outputBuffer; | |
577 } | |
578 get [dartx.playbackTime]() { | |
579 return this.playbackTime; | |
580 } | |
581 } | |
582 dart.setSignature(AudioProcessingEvent, { | |
583 constructors: () => ({_: [AudioProcessingEvent, []]}) | |
584 }); | |
585 AudioProcessingEvent[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('AudioProcessingEvent')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("AudioProcessingEvent"))]; | |
586 dart.registerExtension(dart.global.AudioProcessingEvent, AudioProcessingEvent); | |
587 dart.defineExtensionNames([ | |
588 'getFrequencyResponse', | |
589 'Q', | |
590 'detune', | |
591 'frequency', | |
592 'gain', | |
593 'type' | |
594 ]); | |
595 class BiquadFilterNode extends AudioNode { | |
596 static _() { | |
597 dart.throw(new core.UnsupportedError("Not supported")); | |
598 } | |
599 get [dartx.Q]() { | |
600 return this.Q; | |
601 } | |
602 get [dartx.detune]() { | |
603 return this.detune; | |
604 } | |
605 get [dartx.frequency]() { | |
606 return this.frequency; | |
607 } | |
608 get [dartx.gain]() { | |
609 return this.gain; | |
610 } | |
611 get [dartx.type]() { | |
612 return this.type; | |
613 } | |
614 set [dartx.type](value) { | |
615 this.type = value; | |
616 } | |
617 [dartx.getFrequencyResponse](frequencyHz, magResponse, phaseResponse) { | |
618 return this.getFrequencyResponse(frequencyHz, magResponse, phaseResponse); | |
619 } | |
620 } | |
621 dart.setSignature(BiquadFilterNode, { | |
622 constructors: () => ({_: [BiquadFilterNode, []]}), | |
623 methods: () => ({[dartx.getFrequencyResponse]: [dart.void, [typed_data.Float32List, typed_data.Float32List, typed_data.Float32List]]}) | |
624 }); | |
625 BiquadFilterNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('BiquadFilterNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("BiquadFilterNode"))]; | |
626 dart.registerExtension(dart.global.BiquadFilterNode, BiquadFilterNode); | |
627 class ChannelMergerNode extends AudioNode { | |
628 static _() { | |
629 dart.throw(new core.UnsupportedError("Not supported")); | |
630 } | |
631 } | |
632 dart.setSignature(ChannelMergerNode, { | |
633 constructors: () => ({_: [ChannelMergerNode, []]}) | |
634 }); | |
635 ChannelMergerNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('ChannelMergerNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("ChannelMergerNode,AudioChannelMerger"))]; | |
636 dart.registerExtension(dart.global.ChannelMergerNode, ChannelMergerNode); | |
637 class ChannelSplitterNode extends AudioNode { | |
638 static _() { | |
639 dart.throw(new core.UnsupportedError("Not supported")); | |
640 } | |
641 } | |
642 dart.setSignature(ChannelSplitterNode, { | |
643 constructors: () => ({_: [ChannelSplitterNode, []]}) | |
644 }); | |
645 ChannelSplitterNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('ChannelSplitterNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("ChannelSplitterNode,AudioChannelSplitter"))]; | |
646 dart.registerExtension(dart.global.ChannelSplitterNode, ChannelSplitterNode); | |
647 dart.defineExtensionNames([ | |
648 'buffer', | |
649 'normalize' | |
650 ]); | |
651 class ConvolverNode extends AudioNode { | |
652 static _() { | |
653 dart.throw(new core.UnsupportedError("Not supported")); | |
654 } | |
655 get [dartx.buffer]() { | |
656 return this.buffer; | |
657 } | |
658 set [dartx.buffer](value) { | |
659 this.buffer = value; | |
660 } | |
661 get [dartx.normalize]() { | |
662 return this.normalize; | |
663 } | |
664 set [dartx.normalize](value) { | |
665 this.normalize = value; | |
666 } | |
667 } | |
668 dart.setSignature(ConvolverNode, { | |
669 constructors: () => ({_: [ConvolverNode, []]}) | |
670 }); | |
671 ConvolverNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('ConvolverNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("ConvolverNode"))]; | |
672 dart.registerExtension(dart.global.ConvolverNode, ConvolverNode); | |
673 dart.defineExtensionNames([ | |
674 'delayTime' | |
675 ]); | |
676 class DelayNode extends AudioNode { | |
677 static _() { | |
678 dart.throw(new core.UnsupportedError("Not supported")); | |
679 } | |
680 get [dartx.delayTime]() { | |
681 return this.delayTime; | |
682 } | |
683 } | |
684 dart.setSignature(DelayNode, { | |
685 constructors: () => ({_: [DelayNode, []]}) | |
686 }); | |
687 DelayNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('DelayNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("DelayNode"))]; | |
688 dart.registerExtension(dart.global.DelayNode, DelayNode); | |
689 dart.defineExtensionNames([ | |
690 'attack', | |
691 'knee', | |
692 'ratio', | |
693 'reduction', | |
694 'release', | |
695 'threshold' | |
696 ]); | |
697 class DynamicsCompressorNode extends AudioNode { | |
698 static _() { | |
699 dart.throw(new core.UnsupportedError("Not supported")); | |
700 } | |
701 get [dartx.attack]() { | |
702 return this.attack; | |
703 } | |
704 get [dartx.knee]() { | |
705 return this.knee; | |
706 } | |
707 get [dartx.ratio]() { | |
708 return this.ratio; | |
709 } | |
710 get [dartx.reduction]() { | |
711 return this.reduction; | |
712 } | |
713 get [dartx.release]() { | |
714 return this.release; | |
715 } | |
716 get [dartx.threshold]() { | |
717 return this.threshold; | |
718 } | |
719 } | |
720 dart.setSignature(DynamicsCompressorNode, { | |
721 constructors: () => ({_: [DynamicsCompressorNode, []]}) | |
722 }); | |
723 DynamicsCompressorNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('DynamicsCompressorNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("DynamicsCompressorNode"))]; | |
724 dart.registerExtension(dart.global.DynamicsCompressorNode, DynamicsCompressorNode); | |
725 dart.defineExtensionNames([ | |
726 'gain' | |
727 ]); | |
728 class GainNode extends AudioNode { | |
729 static _() { | |
730 dart.throw(new core.UnsupportedError("Not supported")); | |
731 } | |
732 get [dartx.gain]() { | |
733 return this.gain; | |
734 } | |
735 } | |
736 dart.setSignature(GainNode, { | |
737 constructors: () => ({_: [GainNode, []]}) | |
738 }); | |
739 GainNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('GainNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("GainNode,AudioGainNode"))]; | |
740 dart.registerExtension(dart.global.GainNode, GainNode); | |
741 dart.defineExtensionNames([ | |
742 'mediaElement' | |
743 ]); | |
744 class MediaElementAudioSourceNode extends AudioSourceNode { | |
745 static _() { | |
746 dart.throw(new core.UnsupportedError("Not supported")); | |
747 } | |
748 get [dartx.mediaElement]() { | |
749 return this.mediaElement; | |
750 } | |
751 } | |
752 dart.setSignature(MediaElementAudioSourceNode, { | |
753 constructors: () => ({_: [MediaElementAudioSourceNode, []]}) | |
754 }); | |
755 MediaElementAudioSourceNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('MediaElementAudioSourceNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("MediaElementAudioSourceNode"))]; | |
756 dart.registerExtension(dart.global.MediaElementAudioSourceNode, MediaElementAudioSourceNode); | |
757 dart.defineExtensionNames([ | |
758 'stream' | |
759 ]); | |
760 class MediaStreamAudioDestinationNode extends AudioNode { | |
761 static _() { | |
762 dart.throw(new core.UnsupportedError("Not supported")); | |
763 } | |
764 get [dartx.stream]() { | |
765 return this.stream; | |
766 } | |
767 } | |
768 dart.setSignature(MediaStreamAudioDestinationNode, { | |
769 constructors: () => ({_: [MediaStreamAudioDestinationNode, []]}) | |
770 }); | |
771 MediaStreamAudioDestinationNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('MediaStreamAudioDestinationNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("MediaStreamAudioDestinationNode"))]; | |
772 dart.registerExtension(dart.global.MediaStreamAudioDestinationNode, MediaStreamAudioDestinationNode); | |
773 dart.defineExtensionNames([ | |
774 'mediaStream' | |
775 ]); | |
776 class MediaStreamAudioSourceNode extends AudioSourceNode { | |
777 static _() { | |
778 dart.throw(new core.UnsupportedError("Not supported")); | |
779 } | |
780 get [dartx.mediaStream]() { | |
781 return this.mediaStream; | |
782 } | |
783 } | |
784 dart.setSignature(MediaStreamAudioSourceNode, { | |
785 constructors: () => ({_: [MediaStreamAudioSourceNode, []]}) | |
786 }); | |
787 MediaStreamAudioSourceNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('MediaStreamAudioSourceNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("MediaStreamAudioSourceNode"))]; | |
788 dart.registerExtension(dart.global.MediaStreamAudioSourceNode, MediaStreamAudioSourceNode); | |
789 dart.defineExtensionNames([ | |
790 'renderedBuffer' | |
791 ]); | |
792 class OfflineAudioCompletionEvent extends html.Event { | |
793 static _() { | |
794 dart.throw(new core.UnsupportedError("Not supported")); | |
795 } | |
796 get [dartx.renderedBuffer]() { | |
797 return this.renderedBuffer; | |
798 } | |
799 } | |
800 dart.setSignature(OfflineAudioCompletionEvent, { | |
801 constructors: () => ({_: [OfflineAudioCompletionEvent, []]}) | |
802 }); | |
803 OfflineAudioCompletionEvent[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('OfflineAudioCompletionEvent')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("OfflineAudioCompletionEvent"))]; | |
804 dart.registerExtension(dart.global.OfflineAudioCompletionEvent, OfflineAudioCompletionEvent); | |
805 class OfflineAudioContext extends AudioContext { | |
806 static _() { | |
807 dart.throw(new core.UnsupportedError("Not supported")); | |
808 } | |
809 static new(numberOfChannels, numberOfFrames, sampleRate) { | |
810 return OfflineAudioContext._create_1(numberOfChannels, numberOfFrames, sampleRate); | |
811 } | |
812 static _create_1(numberOfChannels, numberOfFrames, sampleRate) { | |
813 return dart.as(new OfflineAudioContext(numberOfChannels, numberOfFrames, sampleRate), OfflineAudioContext); | |
814 } | |
815 } | |
816 dart.setSignature(OfflineAudioContext, { | |
817 constructors: () => ({ | |
818 _: [OfflineAudioContext, []], | |
819 new: [OfflineAudioContext, [core.int, core.int, core.num]] | |
820 }), | |
821 statics: () => ({_create_1: [OfflineAudioContext, [dart.dynamic, dart.dynamic, dart.dynamic]]}), | |
822 names: ['_create_1'] | |
823 }); | |
824 OfflineAudioContext[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('OfflineAudioContext')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("OfflineAudioContext"))]; | |
825 dart.registerExtension(dart.global.OfflineAudioContext, OfflineAudioContext); | |
826 dart.defineExtensionNames([ | |
827 'noteOff', | |
828 'noteOn', | |
829 'setPeriodicWave', | |
830 'start', | |
831 'stop', | |
832 'onEnded', | |
833 'detune', | |
834 'frequency', | |
835 'type' | |
836 ]); | |
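// Note (added annotation): OscillatorNode keeps the legacy noteOn()/noteOff() methods alongside | |
// start()/stop(), and its Native annotation also matches the older "Oscillator" interface name. | |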
837 class OscillatorNode extends AudioSourceNode { | |
838 static _() { | |
839 dart.throw(new core.UnsupportedError("Not supported")); | |
840 } | |
841 get [dartx.detune]() { | |
842 return this.detune; | |
843 } | |
844 get [dartx.frequency]() { | |
845 return this.frequency; | |
846 } | |
847 get [dartx.type]() { | |
848 return this.type; | |
849 } | |
850 set [dartx.type](value) { | |
851 this.type = value; | |
852 } | |
853 [dartx.noteOff](when) { | |
854 return this.noteOff(when); | |
855 } | |
856 [dartx.noteOn](when) { | |
857 return this.noteOn(when); | |
858 } | |
859 [dartx.setPeriodicWave](periodicWave) { | |
860 return this.setPeriodicWave(periodicWave); | |
861 } | |
862 [dartx.start](when) { | |
863 return this.start(when); | |
864 } | |
865 [dartx.stop](when) { | |
866 return this.stop(when); | |
867 } | |
868 get [dartx.onEnded]() { | |
869 return OscillatorNode.endedEvent.forTarget(this); | |
870 } | |
871 } | |
872 dart.setSignature(OscillatorNode, { | |
873 constructors: () => ({_: [OscillatorNode, []]}), | |
874 methods: () => ({ | |
875 [dartx.noteOff]: [dart.void, [core.num]], | |
876 [dartx.noteOn]: [dart.void, [core.num]], | |
877 [dartx.setPeriodicWave]: [dart.void, [PeriodicWave]], | |
878 [dartx.start]: [dart.void, [], [core.num]], | |
879 [dartx.stop]: [dart.void, [], [core.num]] | |
880 }) | |
881 }); | |
882 OscillatorNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('OscillatorNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("OscillatorNode,Oscillator"))]; | |
883 OscillatorNode.endedEvent = dart.const(new (html.EventStreamProvider$(html.Event))('ended')); | |
884 dart.registerExtension(dart.global.OscillatorNode, OscillatorNode); | |
885 dart.defineExtensionNames([ | |
886 'setOrientation', | |
887 'setPosition', | |
888 'setVelocity', | |
889 'coneInnerAngle', | |
890 'coneOuterAngle', | |
891 'coneOuterGain', | |
892 'distanceModel', | |
893 'maxDistance', | |
894 'panningModel', | |
895 'refDistance', | |
896 'rolloffFactor' | |
897 ]); | |
898 class PannerNode extends AudioNode { | |
899 static _() { | |
900 dart.throw(new core.UnsupportedError("Not supported")); | |
901 } | |
902 get [dartx.coneInnerAngle]() { | |
903 return this.coneInnerAngle; | |
904 } | |
905 set [dartx.coneInnerAngle](value) { | |
906 this.coneInnerAngle = value; | |
907 } | |
908 get [dartx.coneOuterAngle]() { | |
909 return this.coneOuterAngle; | |
910 } | |
911 set [dartx.coneOuterAngle](value) { | |
912 this.coneOuterAngle = value; | |
913 } | |
914 get [dartx.coneOuterGain]() { | |
915 return this.coneOuterGain; | |
916 } | |
917 set [dartx.coneOuterGain](value) { | |
918 this.coneOuterGain = value; | |
919 } | |
920 get [dartx.distanceModel]() { | |
921 return this.distanceModel; | |
922 } | |
923 set [dartx.distanceModel](value) { | |
924 this.distanceModel = value; | |
925 } | |
926 get [dartx.maxDistance]() { | |
927 return this.maxDistance; | |
928 } | |
929 set [dartx.maxDistance](value) { | |
930 this.maxDistance = value; | |
931 } | |
932 get [dartx.panningModel]() { | |
933 return this.panningModel; | |
934 } | |
935 set [dartx.panningModel](value) { | |
936 this.panningModel = value; | |
937 } | |
938 get [dartx.refDistance]() { | |
939 return this.refDistance; | |
940 } | |
941 set [dartx.refDistance](value) { | |
942 this.refDistance = value; | |
943 } | |
944 get [dartx.rolloffFactor]() { | |
945 return this.rolloffFactor; | |
946 } | |
947 set [dartx.rolloffFactor](value) { | |
948 this.rolloffFactor = value; | |
949 } | |
950 [dartx.setOrientation](x, y, z) { | |
951 return this.setOrientation(x, y, z); | |
952 } | |
953 [dartx.setPosition](x, y, z) { | |
954 return this.setPosition(x, y, z); | |
955 } | |
956 [dartx.setVelocity](x, y, z) { | |
957 return this.setVelocity(x, y, z); | |
958 } | |
959 } | |
960 dart.setSignature(PannerNode, { | |
961 constructors: () => ({_: [PannerNode, []]}), | |
962 methods: () => ({ | |
963 [dartx.setOrientation]: [dart.void, [core.num, core.num, core.num]], | |
964 [dartx.setPosition]: [dart.void, [core.num, core.num, core.num]], | |
965 [dartx.setVelocity]: [dart.void, [core.num, core.num, core.num]] | |
966 }) | |
967 }); | |
968 PannerNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('PannerNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("PannerNode,AudioPannerNode,webkitAudioPannerNode"))]; | |
969 dart.registerExtension(dart.global.PannerNode, PannerNode); | |
970 class PeriodicWave extends _interceptors.Interceptor { | |
971 static _() { | |
972 dart.throw(new core.UnsupportedError("Not supported")); | |
973 } | |
974 } | |
975 dart.setSignature(PeriodicWave, { | |
976 constructors: () => ({_: [PeriodicWave, []]}) | |
977 }); | |
978 PeriodicWave[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('PeriodicWave')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("PeriodicWave"))]; | |
979 dart.registerExtension(dart.global.PeriodicWave, PeriodicWave); | |
980 dart.defineExtensionNames([ | |
981 'setEventListener', | |
982 'onAudioProcess', | |
983 'bufferSize' | |
984 ]); | |
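// Note (added annotation): ScriptProcessorNode also matches the legacy JavaScriptAudioNode interface | |
// name and exposes the native 'audioprocess' event as the onAudioProcess stream below. | |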
985 class ScriptProcessorNode extends AudioNode { | |
986 static _() { | |
987 dart.throw(new core.UnsupportedError("Not supported")); | |
988 } | |
989 get [dartx.bufferSize]() { | |
990 return this.bufferSize; | |
991 } | |
992 [dartx.setEventListener](eventListener) { | |
993 return this.setEventListener(eventListener); | |
994 } | |
995 get [dartx.onAudioProcess]() { | |
996 return ScriptProcessorNode.audioProcessEvent.forTarget(this); | |
997 } | |
998 } | |
999 dart.setSignature(ScriptProcessorNode, { | |
1000 constructors: () => ({_: [ScriptProcessorNode, []]}), | |
1001 methods: () => ({[dartx.setEventListener]: [dart.void, [html.EventListener]]}) | |
1002 }); | |
1003 ScriptProcessorNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('ScriptProcessorNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("ScriptProcessorNode,JavaScriptAudioNode"))]; | |
1004 ScriptProcessorNode.audioProcessEvent = dart.const(new (html.EventStreamProvider$(AudioProcessingEvent))('audioprocess')); | |
1005 dart.registerExtension(dart.global.ScriptProcessorNode, ScriptProcessorNode); | |
1006 dart.defineExtensionNames([ | |
1007 'curve', | |
1008 'oversample' | |
1009 ]); | |
1010 class WaveShaperNode extends AudioNode { | |
1011 static _() { | |
1012 dart.throw(new core.UnsupportedError("Not supported")); | |
1013 } | |
1014 get [dartx.curve]() { | |
1015 return this.curve; | |
1016 } | |
1017 set [dartx.curve](value) { | |
1018 this.curve = value; | |
1019 } | |
1020 get [dartx.oversample]() { | |
1021 return this.oversample; | |
1022 } | |
1023 set [dartx.oversample](value) { | |
1024 this.oversample = value; | |
1025 } | |
1026 } | |
1027 dart.setSignature(WaveShaperNode, { | |
1028 constructors: () => ({_: [WaveShaperNode, []]}) | |
1029 }); | |
1030 WaveShaperNode[dart.metadata] = () => [dart.const(new _metadata.DocsEditable()), dart.const(new _metadata.DomName('WaveShaperNode')), dart.const(new _metadata.Experimental()), dart.const(new _js_helper.Native("WaveShaperNode"))]; | |
1031 dart.registerExtension(dart.global.WaveShaperNode, WaveShaperNode); | |
1032 // Exports: | |
1033 exports.AudioNode = AudioNode; | |
1034 exports.AnalyserNode = AnalyserNode; | |
1035 exports.AudioBuffer = AudioBuffer; | |
1036 exports.AudioBufferCallback = AudioBufferCallback; | |
1037 exports.AudioSourceNode = AudioSourceNode; | |
1038 exports.AudioBufferSourceNode = AudioBufferSourceNode; | |
1039 exports.AudioContext = AudioContext; | |
1040 exports.AudioDestinationNode = AudioDestinationNode; | |
1041 exports.AudioListener = AudioListener; | |
1042 exports.AudioParam = AudioParam; | |
1043 exports.AudioProcessingEvent = AudioProcessingEvent; | |
1044 exports.BiquadFilterNode = BiquadFilterNode; | |
1045 exports.ChannelMergerNode = ChannelMergerNode; | |
1046 exports.ChannelSplitterNode = ChannelSplitterNode; | |
1047 exports.ConvolverNode = ConvolverNode; | |
1048 exports.DelayNode = DelayNode; | |
1049 exports.DynamicsCompressorNode = DynamicsCompressorNode; | |
1050 exports.GainNode = GainNode; | |
1051 exports.MediaElementAudioSourceNode = MediaElementAudioSourceNode; | |
1052 exports.MediaStreamAudioDestinationNode = MediaStreamAudioDestinationNode; | |
1053 exports.MediaStreamAudioSourceNode = MediaStreamAudioSourceNode; | |
1054 exports.OfflineAudioCompletionEvent = OfflineAudioCompletionEvent; | |
1055 exports.OfflineAudioContext = OfflineAudioContext; | |
1056 exports.OscillatorNode = OscillatorNode; | |
1057 exports.PannerNode = PannerNode; | |
1058 exports.PeriodicWave = PeriodicWave; | |
1059 exports.ScriptProcessorNode = ScriptProcessorNode; | |
1060 exports.WaveShaperNode = WaveShaperNode; | |
1061 }); | |