Index: third_party/WebKit/LayoutTests/webaudio/AudioNode/tail-processing.html |
diff --git a/third_party/WebKit/LayoutTests/webaudio/AudioNode/tail-processing.html b/third_party/WebKit/LayoutTests/webaudio/AudioNode/tail-processing.html |
new file mode 100644 |
index 0000000000000000000000000000000000000000..e04d781fc74282412f68abe42fa2e379759ceb10 |
--- /dev/null |
+++ b/third_party/WebKit/LayoutTests/webaudio/AudioNode/tail-processing.html |
@@ -0,0 +1,367 @@ |
+<!doctype html> |
+<html> |
+ <head> |
+ <title>Test Handling of Tail Processing</title> |
+ <script src="../../resources/testharness.js"></script> |
+ <script src="../../resources/testharnessreport.js"></script> |
+ <script src="../resources/audit.js"></script> |
+ <script src="../resources/audit-util.js"></script> |
+ </head> |
+ |
+ <body> |
+ <script> |
+ let audit = Audit.createTaskRunner(); |
+ |
+      // Fairly arbitrary, but it must be a power of two to eliminate |
+      // roundoff when we compute times from sample frames. |
+ const sampleRate = 32768; |
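+      // For example, one render quantum is then 128 / 32768 = 2^-8 = |
+      // 0.00390625 s, which is exactly representable as a double. |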
+ |
+ // Fairly arbitrary duration |
+ const renderDuration = 0.25; |
+ const renderFrames = renderDuration * sampleRate; |
+ |
+ audit.define('hrtf-panner-tail', (task, should) => { |
+        runTest('PannerNode', {panningModel: 'HRTF', distanceModel: 'linear'}) |
+ .then(renderedBuffer => { |
+ let prefix = 'HRTF PannerNode'; |
+ let output = renderedBuffer.getChannelData(0); |
+ let response = renderedBuffer.getChannelData(1); |
+ let latencyFrame = findLatencyFrame(response); |
+ let tailFrame = findTailFrame(response); |
+ |
+ // The HRTF panner has both a latency component and a tail |
+ // component. Make sure both are non-zero. |
+ should(latencyFrame, `${prefix} latency frame (${latencyFrame})`) |
+ .beGreaterThan(0); |
+ |
+ should(tailFrame, `${prefix} tail frame (${tailFrame})`) |
+ .beGreaterThan(0); |
+ |
+ // Because of the latency, the output is zero at the beginning. |
+ // Make sure this is true. |
+ should( |
+ output.slice(0, latencyFrame), |
+                  `${prefix} Latency output[0:${latencyFrame - 1}]`) |
+ .beConstantValueOf(0); |
+ |
+ // Verify the rest of the output matches the expected values. The |
+ // output should be non-zero from latencyFrame to tailFrame and |
+ // zero after tailFrame. |
+ verifyOutput(should, output, { |
+ prefix: prefix, |
+ startFrame: latencyFrame, |
+ nonZeroEndFrame: Math.min(tailFrame, output.length), |
+ zeroStartFrame: roundUp(tailFrame), |
+ tailFrame: tailFrame, |
+ reference: response |
+ }); |
+ }) |
+ .then(() => task.done()); |
+ }); |
+ |
+ audit.define('biquad-tail', (task, should) => { |
+ runTest('BiquadFilterNode', {Q: 20, frequency: 100}) |
+ .then(renderedBuffer => { |
+            let prefix = 'BiquadFilter'; |
+ let output = renderedBuffer.getChannelData(0); |
+ let response = renderedBuffer.getChannelData(1); |
+ let tailFrame = findTailFrame(response); |
+ |
+ should(tailFrame, `${prefix} tail frame (${tailFrame})`) |
+ .beGreaterThan(0); |
+ |
+            // Verify the biquad output, which should be non-zero up to |
+            // tailFrame and zero afterwards.  However, the actual output |
+            // isn't exactly zero right after tailFrame because the internal |
+            // biquad tail time is only an approximation.  That's why |
+            // zeroStartFrame is 128 frames past tailFrame. |
+ verifyOutput(should, output, { |
+ prefix: prefix, |
+ startFrame: 0, |
+ nonZeroEndFrame: Math.min(tailFrame + 128, output.length), |
+ zeroStartFrame: 128 + roundUp(tailFrame), |
+ tailFrame: tailFrame, |
+ reference: response |
+ }); |
+ }) |
+ .then(() => task.done()); |
+ }); |
+ |
+ audit.define('iir-tail', (task, should) => { |
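+        // The coefficients give a one-pole filter, |
+        // y[n] = x[n] + 0.99 * y[n - 1], whose impulse response decays like |
+        // 0.99^n.  That drops below the 1/32768 threshold used by |
+        // findTailFrame near frame ln(32768) / -ln(0.99), roughly 1035, so |
+        // the tail spans many render quanta. |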
+        runTest('IIRFilterNode', {feedforward: [1], feedback: [1, -0.99]}) |
+ .then(renderedBuffer => { |
+ let prefix = 'IIRFilter'; |
+ let output = renderedBuffer.getChannelData(0); |
+ let response = renderedBuffer.getChannelData(1); |
+ let tailFrame = findTailFrame(response); |
+ |
+ should(tailFrame, `${prefix} tail frame (${tailFrame})`) |
+ .beGreaterThan(0); |
+ |
+ verifyOutput(should, output, { |
+ prefix: prefix, |
+ startFrame: 0, |
+ nonZeroEndFrame: Math.min(tailFrame + 128, output.length), |
+ zeroStartFrame: 256 + roundUp(tailFrame), |
+ tailFrame: tailFrame, |
+ reference: response |
+ }); |
+ }) |
+ .then(() => task.done()); |
+ }); |
+ |
+ audit.define('delay-tail', (task, should) => { |
+        // For the test, make sure the delay is greater than one render |
+        // quantum.  If it's less, we can't tell whether tail processing |
+        // worked because the input signal is an impulse. |
+ let delayFrames = RENDER_QUANTUM_FRAMES + 64; |
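+        // 192 frames, or 192 / 32768 s (about 5.86 ms). |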
+ runTest('DelayNode', {delayTime: delayFrames / sampleRate}) |
+ .then(renderedBuffer => { |
+ let prefix = 'Delay'; |
+ let output = renderedBuffer.getChannelData(0); |
+ let response = renderedBuffer.getChannelData(1); |
+ let tailFrame = findTailFrame(response); |
+ |
+ should(tailFrame, `${prefix} tail frame (${tailFrame})`) |
+ .beGreaterThan(0); |
+ |
+            // Because the delay time exceeds one render quantum, the |
+            // output over the first render quantum must be 0. |
+ should(output.slice(0, RENDER_QUANTUM_FRAMES), |
+                `${prefix} output[0:${RENDER_QUANTUM_FRAMES - 1}]`) |
+ .beConstantValueOf(0); |
+ |
+            // The delay node's output should be non-zero from the second |
+            // render quantum up to the tail frame, and zero thereafter. |
+ verifyOutput(should, output, { |
+ prefix: prefix, |
+ startFrame: RENDER_QUANTUM_FRAMES, |
+ nonZeroEndFrame: Math.min(tailFrame, output.length), |
+ zeroStartFrame: roundUp(tailFrame), |
+ tailFrame: tailFrame, |
+ reference: response |
+ }); |
+ |
+ }) |
+ .then(() => task.done()); |
+ }); |
+ |
+ audit.define('convolver-tail', (task, should) => { |
+        // The convolver response.  It needs to be longer than one render |
+        // quantum to exercise tail processing. |
+ let response = new AudioBuffer({length: RENDER_QUANTUM_FRAMES + 64, |
+ sampleRate: sampleRate}); |
+ // For simplicity, just make the response all ones. |
+ response.getChannelData(0).fill(1); |
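+        // Convolving the single-frame unit impulse with this response just |
+        // reproduces the response, so the reference channel should be |
+        // RENDER_QUANTUM_FRAMES + 64 ones followed by silence. |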
+ |
+ runTest('ConvolverNode', {disableNormalization: true, buffer: response}) |
+ .then(renderedBuffer => { |
+ let prefix = 'Convolver'; |
+ let output = renderedBuffer.getChannelData(0); |
+ let response = renderedBuffer.getChannelData(1); |
+ let tailFrame = findTailFrame(response); |
+ |
+ should(tailFrame, `${prefix} tail frame (${tailFrame})`) |
+ .beGreaterThan(0); |
+ |
+ verifyOutput(should, output, { |
+ prefix: prefix, |
+ startFrame: 0, |
+ nonZeroEndFrame: Math.min(tailFrame + 128, output.length), |
+ zeroStartFrame: 128 + roundUp(tailFrame), |
+ tailFrame: tailFrame, |
+ reference: response |
+ }); |
+ }) |
+ .then(() => task.done()); |
+ }); |
+ |
+ audit.define('dynamics-compressor-tail', (task, should) => { |
+ runTest('DynamicsCompressorNode', {}) |
+ .then(renderedBuffer => { |
+            let prefix = 'DynamicsCompressor'; |
+ let output = renderedBuffer.getChannelData(0); |
+ let response = renderedBuffer.getChannelData(1); |
+ let tailFrame = findTailFrame(response); |
+ |
+ should(tailFrame, `${prefix} tail frame (${tailFrame})`) |
+ .beGreaterThan(0); |
+ |
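+            // The compressor's lookahead pre-delay makes the output silent |
+            // at first.  Estimate the latency conservatively as the render- |
+            // quantum boundary just below the tail frame, assuming the |
+            // impulse response is essentially a delayed impulse. |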
+ let latencyFrame = roundDown(tailFrame - 1); |
+ should( |
+ output.slice(0, latencyFrame), |
+                `${prefix} output[0:${latencyFrame - 1}]`) |
+ .beConstantValueOf(0); |
+ |
+ verifyOutput(should, output, { |
+ prefix: prefix, |
+ startFrame: latencyFrame, |
+ nonZeroEndFrame: Math.min(tailFrame, output.length), |
+ zeroStartFrame: roundUp(tailFrame), |
+ tailFrame: tailFrame, |
+ reference: response |
+ }); |
+ |
+ }) |
+ .then(() => task.done()); |
+ }); |
+ |
+ audit.define('waveshaper-tail', (task, should) => { |
+ // Fairly arbitrary curve for the WaveShaper |
+        let curve = Float32Array.from([-1, -0.5, 0, 0.5, 1]); |
+ |
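+        // Wave shaping itself is memoryless; the tail here comes from the |
+        // resampling filters implied by '2x' oversampling. |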
+ runTest('WaveShaperNode', {curve: curve, oversample: '2x'}) |
+ .then(renderedBuffer => { |
+ let prefix = 'WaveShaper'; |
+ let output = renderedBuffer.getChannelData(0); |
+ let response = renderedBuffer.getChannelData(1); |
+ let tailFrame = findTailFrame(response); |
+ |
+ should(tailFrame, `${prefix} tail frame (${tailFrame})`) |
+ .beGreaterThan(0); |
+ |
+ verifyOutput(should, output, { |
+ prefix: prefix, |
+ startFrame: 0, |
+ nonZeroEndFrame: Math.min(tailFrame, output.length), |
+ zeroStartFrame: roundUp(tailFrame), |
+ tailFrame: tailFrame, |
+ reference: response |
+ }); |
+ }) |
+ .then(() => task.done()); |
+ }); |
+ |
+ audit.run(); |
+ |
+ function runTest(nodeName, nodeOptions) { |
+ // Two-channel output. Channel 0 is the test result; channel 1 is the |
+ // impulse response that is used to figure out when the tail should |
+ // start. |
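+      // The graph is: |
+      // |
+      //   src -----> testNode --> merger:0 --+--> destination |
+      //   impulse -> refNode ---> merger:1 --+ |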
+        let context = new OfflineAudioContext(2, renderFrames, sampleRate); |
+ |
+ // Merge channels for the destination. |
+ let merger = new ChannelMergerNode(context, {numberOfInputs: 2}); |
+ merger.connect(context.destination); |
+ |
+ let src = new ConstantSourceNode(context, {offset: 1}); |
+ |
+ // Impulse for testing. We want a full buffer so as not to worry about |
+ // the source disconnecting prematurely from the filter. |
+ let b = new AudioBuffer( |
+ {length: context.length, sampleRate: context.sampleRate}); |
+ b.getChannelData(0)[0] = 1; |
+ let impulse = new AudioBufferSourceNode(context, {buffer: b}); |
+ |
+ let testNode = new window[nodeName](context, nodeOptions); |
+ let refNode = new window[nodeName](context, nodeOptions); |
+ |
+ src.connect(testNode).connect(merger, 0, 0); |
+ impulse.connect(refNode).connect(merger, 0, 1); |
+ |
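+        // Stop the constant source after one frame so the test input is a |
+        // single-frame pulse, just like the reference impulse. |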
+ src.start(); |
+ src.stop(1 / context.sampleRate); |
+ impulse.start(); |
+ |
+ return context.startRendering(); |
+ } |
+ |
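+      // Return the frame just past the last sample whose magnitude exceeds |
+      // 1/32768, or response.length if no sample does.  For example, |
+      // findTailFrame([0, 0.5, 0, 0]) returns 2. |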
+ function findTailFrame(response) { |
+ let tailFrame = response.length; |
+ |
+ for (let k = response.length - 1; k >= 0; --k) { |
+ if (Math.abs(response[k]) > 1 / 32768) { |
+ tailFrame = k + 1; |
+ break; |
+ } |
+ } |
+ |
+ return tailFrame; |
+ } |
+ |
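+      // Return the index of the first non-zero sample, e.g. 3 for |
+      // [0, 0, 0, 0.25], or response.length if all samples are zero. |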
+ function findLatencyFrame(response) { |
+ for (let k = 0; k < response.length; ++k) { |
+ if (response[k] != 0) |
+ return k; |
+ } |
+ |
+ return response.length; |
+ } |
+ |
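+      // Check |output| against |options|, whose members are: |
+      //   prefix          prefix for the test messages |
+      //   startFrame      first frame of the expected non-zero region |
+      //   nonZeroEndFrame end (exclusive) of the expected non-zero region |
+      //   zeroStartFrame  first frame (inclusive) expected to be zero |
+      //   tailFrame       tail frame of the reference response |
+      //   reference       expected output up to the tail frame |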
+ function verifyOutput(should, output, options) { |
+ let prefix = options.prefix || ''; |
+ if (options.tailFrame && options.reference) { |
+ should( |
+ output.slice(0, options.tailFrame), |
+            `${prefix} Tail output[0:${options.tailFrame - 1}]`) |
+ .beEqualToArray(options.reference.slice(0, options.tailFrame)); |
+ } |
+ |
+ // Verify that |output| is non-zero between |startFrame| and |
+ // |nonZeroEndFrame|. |
+ for (let k = options.startFrame; k < options.nonZeroEndFrame; |
+ k += 128) { |
+ should( |
+ output.slice(k, k + 128), |
+            `${prefix} output[${k}:${k + 127}]`) |
+ .notBeConstantValueOf(0); |
+ } |
+ |
+      // Verify |output| is zero starting at frame |zeroStartFrame|, inclusive. |
+ if (options.zeroStartFrame < output.length) { |
+ should( |
+ output.slice(options.zeroStartFrame), |
+              `${prefix} output[${options.zeroStartFrame}:]`) |
+ .beConstantValueOf(0); |
+ } |
+ } |
+ |
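+      // Round |frame| down to the nearest render-quantum (128-frame) |
+      // boundary; e.g., roundDown(130) === 128. |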
+ function roundDown(frame) { |
+ return 128 * Math.floor(frame / 128); |
+ } |
+ |
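+      // Round |frame| up to the nearest render-quantum boundary; e.g., |
+      // roundUp(130) === 256. |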
+ function roundUp(frame) { |
+ return 128 * Math.ceil(frame / 128); |
+ } |
+ </script> |
+ </body> |
+</html> |