| OLD | NEW |
| (Empty) | |
| 1 <!doctype html> |
| 2 <html> |
| 3 <head> |
| 4 <title>Test Handling of Tail Processing</title> |
| 5 <script src="../../resources/testharness.js"></script> |
| 6 <script src="../../resources/testharnessreport.js"></script> |
| 7 <script src="../resources/audit.js"></script> |
| 8 <script src="../resources/audit-util.js"></script> |
| 9 </head> |
| 10 |
| 11 <body> |
| 12 <script> |
// Shared task runner for all of the tail-processing tests below.
const audit = Audit.createTaskRunner();

// The sample rate is fairly arbitrary, but it must be a power of two so
// that converting between sample frames and times is exact (no roundoff).
const sampleRate = 32768;

// Fairly arbitrary render length, in seconds and in frames.
const renderDuration = 0.25;
const renderFrames = renderDuration * sampleRate;
| 22 |
audit.define('hrtf-panner-tail', (task, should) => {
  // Bug fix: PannerOptions has no |distanceMode| member; the correct
  // dictionary key is |distanceModel|.  The misspelled key was silently
  // ignored, so the panner ran with the default distance model.
  runTest('PannerNode', {panningModel: 'HRTF', distanceModel: 'linear'})
      .then(renderedBuffer => {
        let prefix = 'HRTF PannerNode';
        let output = renderedBuffer.getChannelData(0);
        let response = renderedBuffer.getChannelData(1);
        let latencyFrame = findLatencyFrame(response);
        let tailFrame = findTailFrame(response);

        // The HRTF panner has both a latency component and a tail
        // component. Make sure both are non-zero.
        should(latencyFrame, `${prefix} latency frame (${latencyFrame})`)
            .beGreaterThan(0);

        should(tailFrame, `${prefix} tail frame (${tailFrame})`)
            .beGreaterThan(0);

        // Because of the latency, the output is zero at the beginning.
        // Make sure this is true.
        should(
            output.slice(0, latencyFrame),
            `${prefix} Latency output[0:` + (latencyFrame - 1) + ']')
            .beConstantValueOf(0);

        // Verify the rest of the output matches the expected values. The
        // output should be non-zero from latencyFrame to tailFrame and
        // zero after tailFrame.
        verifyOutput(should, output, {
          prefix: prefix,
          startFrame: latencyFrame,
          nonZeroEndFrame: Math.min(tailFrame, output.length),
          zeroStartFrame: roundUp(tailFrame),
          tailFrame: tailFrame,
          reference: response
        });
      })
      .then(() => task.done());
});
| 61 |
audit.define('biquad-tail', (task, should) => {
  // High-Q lowpass biquad: the resonance gives the filter a long ringing
  // tail after the input impulse ends.
  runTest('BiquadFilterNode', {Q: 20, frequency: 100})
      .then(renderedBuffer => {
        // Fix: statement was missing its terminating semicolon (relied
        // on automatic semicolon insertion).
        let prefix = 'BiquadFilter';
        let output = renderedBuffer.getChannelData(0);
        let response = renderedBuffer.getChannelData(1);
        let tailFrame = findTailFrame(response);

        should(tailFrame, `${prefix} tail frame (${tailFrame})`)
            .beGreaterThan(0);

        // Verify biquad output which should be non-zero up to tailFrame
        // and zero afterwards. However, the actual output isn't after
        // tailFrame because the internal biquad tail time uses an
        // approximation. That's why zeroStartFrame is 128 frames after
        // tailFrame.
        verifyOutput(should, output, {
          prefix: prefix,
          startFrame: 0,
          nonZeroEndFrame: Math.min(tailFrame + 128, output.length),
          zeroStartFrame: 128 + roundUp(tailFrame),
          tailFrame: tailFrame,
          reference: response
        });
      })
      .then(() => task.done());
});
| 89 |
audit.define('iir-tail', (task, should) => {
  // One-pole lowpass; the 0.99 feedback coefficient produces a long but
  // finite tail after the impulse.
  runTest('IIRFilterNode', {feedforward: [1], feedback: [1, -.99]})
      .then(buffer => {
        const prefix = 'IIRFilter';
        const actualOutput = buffer.getChannelData(0);
        const impulseResponse = buffer.getChannelData(1);
        const tailFrame = findTailFrame(impulseResponse);

        should(tailFrame, `${prefix} tail frame (${tailFrame})`)
            .beGreaterThan(0);

        // The output must be non-silent up to the tail frame and silent
        // afterwards; the extra 128/256 frames of slop account for the
        // approximate internal tail-time estimate of the IIR filter.
        verifyOutput(should, actualOutput, {
          prefix: prefix,
          startFrame: 0,
          nonZeroEndFrame: Math.min(tailFrame + 128, actualOutput.length),
          zeroStartFrame: 256 + roundUp(tailFrame),
          tailFrame: tailFrame,
          reference: impulseResponse
        });
      })
      .then(() => task.done());
});
| 112 |
audit.define('delay-tail', (task, should) => {
  // Use a delay longer than one render quantum.  With anything shorter
  // we couldn't tell tail processing apart from the normal output,
  // because the input signal is an impulse.
  const delayInFrames = RENDER_QUANTUM_FRAMES + 64;
  runTest('DelayNode', {delayTime: delayInFrames / sampleRate})
      .then(renderedBuffer => {
        const prefix = 'Delay';
        const output = renderedBuffer.getChannelData(0);
        const response = renderedBuffer.getChannelData(1);
        const tailFrame = findTailFrame(response);

        should(tailFrame, `${prefix} tail frame (${tailFrame})`)
            .beGreaterThan(0);

        // Since the delay time exceeds one render quantum, the first
        // render quantum of output must be completely silent.
        should(output.slice(0, RENDER_QUANTUM_FRAMES),
               `${prefix} output[0:` + (RENDER_QUANTUM_FRAMES - 1) + ']')
            .beConstantValueOf(0);

        // The delayed impulse appears in the second render quantum; the
        // output must be silent forever after the tail frame.
        verifyOutput(should, output, {
          prefix: prefix,
          startFrame: RENDER_QUANTUM_FRAMES,
          nonZeroEndFrame: Math.min(tailFrame, output.length),
          zeroStartFrame: roundUp(tailFrame),
          tailFrame: tailFrame,
          reference: response
        });
      })
      .then(() => task.done());
});
| 148 |
audit.define('convolver-tail', (task, should) => {
  // Impulse response for the convolver.  It must be longer than one
  // render quantum to exercise tail processing; all ones for simplicity.
  // (Renamed from |response| to avoid shadowing the channel data below.)
  const convolverResponse = new AudioBuffer(
      {length: RENDER_QUANTUM_FRAMES + 64, sampleRate: sampleRate});
  convolverResponse.getChannelData(0).fill(1);

  runTest('ConvolverNode',
          {disableNormalization: true, buffer: convolverResponse})
      .then(renderedBuffer => {
        const prefix = 'Convolver';
        const output = renderedBuffer.getChannelData(0);
        const response = renderedBuffer.getChannelData(1);
        const tailFrame = findTailFrame(response);

        should(tailFrame, `${prefix} tail frame (${tailFrame})`)
            .beGreaterThan(0);

        // Non-zero up to the tail frame (plus a quantum of slop for the
        // internal tail estimate), silent afterwards.
        verifyOutput(should, output, {
          prefix: prefix,
          startFrame: 0,
          nonZeroEndFrame: Math.min(tailFrame + 128, output.length),
          zeroStartFrame: 128 + roundUp(tailFrame),
          tailFrame: tailFrame,
          reference: response
        });
      })
      .then(() => task.done());
});
| 178 |
audit.define('dynamics-compressor-tail', (task, should) => {
  runTest('DynamicsCompressorNode', {})
      .then(renderedBuffer => {
        // Fix typo in the message prefix: was 'DyamicsCompressor'.
        let prefix = 'DynamicsCompressor';
        let output = renderedBuffer.getChannelData(0);
        let response = renderedBuffer.getChannelData(1);
        let tailFrame = findTailFrame(response);

        should(tailFrame, `${prefix} tail frame (${tailFrame})`)
            .beGreaterThan(0);

        // The compressor has look-ahead latency, so the output is
        // silent up to (at least) the render quantum just before the
        // tail frame.
        let latencyFrame = roundDown(tailFrame - 1);
        should(
            output.slice(0, latencyFrame),
            `${prefix} output[0:` + (latencyFrame - 1) + ']')
            .beConstantValueOf(0);

        // Non-zero between the latency and tail frames, silent after.
        verifyOutput(should, output, {
          prefix: prefix,
          startFrame: latencyFrame,
          nonZeroEndFrame: Math.min(tailFrame, output.length),
          zeroStartFrame: roundUp(tailFrame),
          tailFrame: tailFrame,
          reference: response
        });
      })
      .then(() => task.done());
});
| 208 |
audit.define('waveshaper-tail', (task, should) => {
  // Fairly arbitrary shaping curve for the WaveShaper.
  const curve = Float32Array.from([-1, -.5, 0, 0.5, 1]);

  // 2x oversampling makes the WaveShaper resample internally, which is
  // what gives it a tail worth testing.
  runTest('WaveShaperNode', {curve: curve, oversample: '2x'})
      .then(renderedBuffer => {
        const prefix = 'WaveShaper';
        const output = renderedBuffer.getChannelData(0);
        const response = renderedBuffer.getChannelData(1);
        const tailFrame = findTailFrame(response);

        should(tailFrame, `${prefix} tail frame (${tailFrame})`)
            .beGreaterThan(0);

        // Non-zero up to the tail frame, silent afterwards.
        verifyOutput(should, output, {
          prefix: prefix,
          startFrame: 0,
          nonZeroEndFrame: Math.min(tailFrame, output.length),
          zeroStartFrame: roundUp(tailFrame),
          tailFrame: tailFrame,
          reference: response
        });
      })
      .then(() => task.done());
});
| 234 |
// Kick off all of the tasks defined above.
audit.run();
| 236 |
// Renders |nodeName| (constructed with |nodeOptions|) into a two-channel
// offline context and returns the promise from startRendering().
// Channel 0 is the test result: the node driven by a one-frame constant
// source.  Channel 1 is the node's impulse response, used by the callers
// to figure out where the tail should start.
function runTest(nodeName, nodeOptions) {
  const context = new OfflineAudioContext(2, sampleRate, sampleRate);

  // Merge the two channels into the destination.
  const merger = new ChannelMergerNode(context, {numberOfInputs: 2});
  merger.connect(context.destination);

  // Test signal: a constant source that is stopped after a single frame.
  const src = new ConstantSourceNode(context, {offset: 1});

  // Reference signal: a true impulse.  Use a full-length buffer so the
  // source doesn't disconnect prematurely from the reference node.
  const impulseBuffer = new AudioBuffer(
      {length: context.length, sampleRate: context.sampleRate});
  impulseBuffer.getChannelData(0)[0] = 1;
  const impulse = new AudioBufferSourceNode(context, {buffer: impulseBuffer});

  // Two identically-configured instances of the node under test.
  const testNode = new window[nodeName](context, nodeOptions);
  const refNode = new window[nodeName](context, nodeOptions);

  src.connect(testNode).connect(merger, 0, 0);
  impulse.connect(refNode).connect(merger, 0, 1);

  src.start();
  src.stop(1 / context.sampleRate);
  impulse.start();

  return context.startRendering();
}
| 268 |
// Returns one past the index of the last "audible" sample in |response|,
// where audible means |sample| > 1/32768 (about one 16-bit LSB).  If no
// sample exceeds the threshold, returns response.length.
function findTailFrame(response) {
  const threshold = 1 / 32768;

  // Scan backwards for the last sample above the threshold.
  for (let frame = response.length; frame > 0; --frame) {
    if (Math.abs(response[frame - 1]) > threshold)
      return frame;
  }

  return response.length;
}
| 281 |
// Returns the index of the first non-zero sample of |response|, or
// response.length if the response is silent throughout.
function findLatencyFrame(response) {
  for (let k = 0; k < response.length; ++k) {
    // Use strict !== instead of the loose != (same result for numbers,
    // but loose equality is against best practice).
    if (response[k] !== 0)
      return k;
  }

  return response.length;
}
| 290 |
// Checks |output| against the expectations in |options| using the
// testharness |should| assertion factory:
//   - output[0:tailFrame] equals |options.reference| (when both
//     tailFrame and reference are provided and tailFrame is non-zero);
//   - output is not constant-zero in each 128-frame quantum from
//     startFrame up to nonZeroEndFrame;
//   - output is constant-zero from zeroStartFrame onward.
function verifyOutput(should, output, options) {
  const prefix = options.prefix || '';

  if (options.tailFrame && options.reference) {
    const tail = options.tailFrame;
    should(output.slice(0, tail), `${prefix} Tail output[0:${tail - 1}]`)
        .beEqualToArray(options.reference.slice(0, tail));
  }

  // Each render quantum in [startFrame, nonZeroEndFrame) must contain
  // at least one non-zero sample.
  for (let frame = options.startFrame; frame < options.nonZeroEndFrame;
       frame += 128) {
    should(output.slice(frame, frame + 128),
           `${prefix} output[${frame}:${frame + 127}]`)
        .notBeConstantValueOf(0);
  }

  // Everything at and after zeroStartFrame must be silent.
  if (options.zeroStartFrame < output.length) {
    should(output.slice(options.zeroStartFrame),
           `${prefix} output[${options.zeroStartFrame}:]`)
        .beConstantValueOf(0);
  }
}
| 318 |
// Rounds |frame| down to the nearest multiple of the 128-frame render
// quantum.
function roundDown(frame) {
  const quantum = 128;
  return quantum * Math.floor(frame / quantum);
}
| 322 |
// Rounds |frame| up to the nearest multiple of the 128-frame render
// quantum.
function roundUp(frame) {
  const quantum = 128;
  return quantum * Math.ceil(frame / quantum);
}
| 326 </script> |
| 327 </body> |
| 328 </html> |
| OLD | NEW |