| Index: LayoutTests/webaudio/offlineaudiocontext-suspend-resume-graph-manipulation.html
|
| diff --git a/LayoutTests/webaudio/offlineaudiocontext-suspend-resume-graph-manipulation.html b/LayoutTests/webaudio/offlineaudiocontext-suspend-resume-graph-manipulation.html
|
| new file mode 100644
|
| index 0000000000000000000000000000000000000000..ae9fabb43bbc999fcd3e5a5b1fb02d104b81eece
|
| --- /dev/null
|
| +++ b/LayoutTests/webaudio/offlineaudiocontext-suspend-resume-graph-manipulation.html
|
| @@ -0,0 +1,84 @@
|
| +<!doctype html>
|
| +<html>
|
| + <head>
|
| + <script src="../resources/js-test.js"></script>
|
| + <script src="resources/compatibility.js"></script>
|
| + <script src="resources/audio-testing.js"></script>
|
| + </head>
|
| +
|
| + <body>
|
| + <script>
|
| + description('Test synchronous graph manipulation with OfflineAudioContext.suspend() and OfflineAudioContext.resume().');
|
| + window.jsTestIsAsync = true;
|
| +
|
| + var context;
|
| + var renderQuantum = 128;
|
| + var renderDuration = 3;
|
| +
|
| + // The sample rate is a multiple of the render quantum, so the suspension
|
| + // times fall exactly on render quantum boundaries.
|
| + var sampleRate = renderQuantum * 100;
|
| +
|
| + context = new OfflineAudioContext(1, sampleRate * renderDuration, sampleRate);
|
| +
|
| + // Create a constant buffer of 1.0.
|
| + var constantBuffer = createConstantBuffer(context, 128, 1.0);
|
| + var constantSource = context.createBufferSource();
|
| + constantSource.buffer = constantBuffer;
|
| + constantSource.loop = true;
|
| +
|
| + // The audio output from the beginning (0 seconds) to the first suspend
|
| + // time should be 0.0 because there is no connection to the destination.
|
| +
|
| + // Suspend at 1 second and activate the source node. The audio output
|
| + // should be 1.0 from |suspendTime1| to the next suspension.
|
| + var suspendTime1 = 1;
|
| + context.suspend(suspendTime1).then(function () {
|
| + Should('The first suspend time', context.currentTime).beEqualTo(suspendTime1);
|
| + constantSource.connect(context.destination);
|
| + constantSource.start();
|
| + context.resume();
|
| + });
|
| +
|
| + // Suspend at 2 seconds and disconnect the node. The audio output should
|
| + // be 0.0 from |suspendTime2| to the end.
|
| + var suspendTime2 = 2;
|
| + context.suspend(suspendTime2).then(function () {
|
| + Should('The second suspend time', context.currentTime).beEqualTo(suspendTime2);
|
| + constantSource.disconnect();
|
| + context.resume();
|
| + });
|
| +
|
| + context.startRendering().then(function (buffer) {
|
| + verifyResult(buffer);
|
| + finishJSTest();
|
| + });
|
| +
|
| + function verifyResult(buffer) {
|
| + var data = buffer.getChannelData(0);
|
| +
|
| + var suspendIndex1 = suspendTime1 * sampleRate;
|
| + var suspendIndex2 = suspendTime2 * sampleRate;
|
| + var endIndex = renderDuration * sampleRate;
|
| +
|
| + // Split the rendered buffer into 3 segments:
|
| + // [0, suspendIndex1), [suspendIndex1, suspendIndex2), [suspendIndex2,
|
| + // endIndex).
|
| + var subarray0 = data.subarray(0, suspendIndex1);
|
| + var subarray1 = data.subarray(suspendIndex1, suspendIndex2);
|
| + var subarray2 = data.subarray(suspendIndex2, endIndex);
|
| +
|
| + // Each segment should contain a constant value of 0, 1, and 0,
|
| + // respectively.
|
| + Should('Buffer [0, ' + suspendIndex1 + ')', subarray0).beConstantValueOf(0);
|
| + Should('Buffer [' + suspendIndex1 + ', ' + suspendIndex2 + ')', subarray1)
|
| + .beConstantValueOf(1);
|
| + Should('Buffer [' + suspendIndex2 + ', ' + endIndex + ')', subarray2)
|
| + .beConstantValueOf(0);
|
| + }
|
| +
|
| + successfullyParsed = true;
|
| + </script>
|
| +
|
| + </body>
|
| +</html>
|
|
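A quick arithmetic check (illustrative, not part of the patch) of the comment in the test about render quantum alignment: with sampleRate = renderQuantum * 100, every whole-second suspend time lands exactly on a render quantum boundary.

    var renderQuantum = 128;
    var sampleRate = renderQuantum * 100;                  // 12800 Hz
    // suspend(1) pauses at frame 1 * 12800 = 12800, i.e. exactly 100 quanta.
    // suspend(2) pauses at frame 2 * 12800 = 25600, i.e. exactly 200 quanta.
    console.log((1 * sampleRate) % renderQuantum === 0);   // true
    console.log((2 * sampleRate) % renderQuantum === 0);   // true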
|
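For context, the suspend()/resume() flow this test exercises can be sketched on its own roughly as below (a minimal sketch, not part of the patch; the oscillator and constants are illustrative). suspend(t) returns a promise that resolves once offline rendering has paused at t, the graph can then be edited synchronously, and resume() lets rendering continue.

    var sampleRate = 128 * 100;   // multiple of the 128-frame render quantum
    var ctx = new OfflineAudioContext(1, sampleRate * 3, sampleRate);
    var osc = ctx.createOscillator();

    ctx.suspend(1).then(function () {
      // Rendering is paused at exactly 1 second; graph edits made here take
      // effect when rendering resumes.
      osc.connect(ctx.destination);
      osc.start();
      ctx.resume();
    });

    ctx.startRendering().then(function (renderedBuffer) {
      // renderedBuffer should be silent for [0, 1) s and contain the
      // oscillator output for [1, 3) s.
    });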