Chromium Code Reviews| Index: third_party/WebKit/LayoutTests/webaudio/ScriptProcessor/scriptprocessornode.html |
| diff --git a/third_party/WebKit/LayoutTests/webaudio/ScriptProcessor/scriptprocessornode.html b/third_party/WebKit/LayoutTests/webaudio/ScriptProcessor/scriptprocessornode.html |
| index 5bc37cea145122466e89671fe759deba11898bf3..dbdec638448cb59ffc6f1d0bfb0fc903d8886303 100644 |
| --- a/third_party/WebKit/LayoutTests/webaudio/ScriptProcessor/scriptprocessornode.html |
| +++ b/third_party/WebKit/LayoutTests/webaudio/ScriptProcessor/scriptprocessornode.html |
| @@ -1,171 +1,134 @@ |
| <!DOCTYPE html> |
| <html> |
| <head> |
| -<script src="../../resources/js-test.js"></script> |
| +<script src="../../resources/testharness.js"></script> |
| +<script src="../../resources/testharnessreport.js"></script> |
| <script src="../resources/audit-util.js"></script> |
| -<script src="../resources/audio-testing.js"></script> |
| +<script src="../resources/audit.js"></script> |
| </head> |
| <body> |
| - |
| -<div id="description"></div> |
| -<div id="console"></div> |
| - |
| <script> |
| -description("Tests ScriptProcessorNode."); |
| +let audit = Audit.createTaskRunner(); |
| -var sampleRate = 44100.0; |
| -var outputChannels = 6; |
| -var playbackTime = 0.0; |
| +let sampleRate = 44100.0; |
| +let outputChannels = 6; |
| +let playbackTime = 0.0; |
| -// For the current implementation of ScriptProcessorNode, when it works with OfflineAudioContext (which runs much faster |
| -// than real-time) the event.inputBuffer might be overwrite again before onaudioprocess ever get chance to be called. |
| -// We carefully arrange the renderLengthInFrames and bufferSize to have exactly the same value to avoid this issue. |
| -var renderLengthInFrames = 512; |
| -var bufferSize = 512; |
| +// For the current implementation of ScriptProcessorNode, when it works with |
| +// OfflineAudioContext (which runs much faster than real-time) the |
| +// event.inputBuffer might be overwritten again before onaudioprocess ever |
| +// gets a chance to be called. We carefully arrange the renderLengthInFrames |
| +// and bufferSize to have exactly the same value to avoid this issue. |
|
hongchan
2017/02/24 23:12:46
This is not true anymore. Now we have suspend/resu
|
| +let renderLengthInFrames = 512; |
| +let bufferSize = 512; |
| -var context; |
| +let context; |
| function createBuffer(context, length) { |
| - var audioBuffer = context.createBuffer(2, length, sampleRate); |
| - var n = audioBuffer.length; |
| - var dataL = audioBuffer.getChannelData(0); |
| - var dataR = audioBuffer.getChannelData(1); |
| + let audioBuffer = context.createBuffer(2, length, sampleRate); |
| + let n = audioBuffer.length; |
| + let dataL = audioBuffer.getChannelData(0); |
| + let dataR = audioBuffer.getChannelData(1); |
| - for (var i = 0; i < n; ++i) { |
| - dataL[i] = -1; |
| - dataR[i] = 1; |
| - } |
| + for (let i = 0; i < n; ++i) { |
| + dataL[i] = -1; |
| + dataR[i] = 1; |
| + } |
| - return audioBuffer; |
| + return audioBuffer; |
| } |
| -function processAudioData(event) { |
| - playbackTime = event.playbackTime; |
| - var expectedTime = context.currentTime + (bufferSize / context.sampleRate); |
| - var allowedTimeGap = 0.0000001; |
| - |
| - // There may be a little time gap which is from different thread operation |
| - // between currentTime when main thread fires onaudioprocess() and currentTime when read in JS |
| - // since currentTime is continuously increasing on audio thread. |
| - |
| - shouldBeCloseTo("playbackTime", expectedTime, allowedTimeGap, true); |
| - |
| - buffer = event.outputBuffer; |
| - if (buffer.numberOfChannels != outputChannels) |
| - testFailed("numberOfOutputChannels doesn't match!"); |
| - |
| - if (buffer.length != bufferSize) |
| - testFailed("numberOfOutputChannels doesn't match!"); |
| - |
| - buffer = event.inputBuffer; |
| - var bufferDataL = buffer.getChannelData(0); |
| - var bufferDataR = buffer.getChannelData(1); |
| - |
| - var success = true; |
| - // Go through every sample and make sure it's all -1 for the left-channel, and all +1 for the right-channel. |
| - for (var i = 0; i < buffer.length; ++i) { |
| - if (bufferDataL[i] != -1 || bufferDataR[i] != 1) { |
| - success = false; |
| - break; |
| - } |
| - } |
| - |
| - if (success) { |
| - testPassed("onaudioprocess was called with correct data."); |
| - } else { |
| - testFailed("onaudioprocess was called with wrong data."); |
| - } |
| -} |
| +function processAudioData(event, should) { |
| + playbackTime = event.playbackTime; |
| + let expectedTime = context.currentTime + (bufferSize / context.sampleRate); |
| + let allowedTimeGap = 0.0000001; |
| + |
| + // There may be a small time gap, caused by cross-thread timing, between |
| + // currentTime when the main thread fires onaudioprocess() and currentTime |
| + // when it is read in JS, since currentTime is continuously increasing on |
| + // the audio thread. |
| + |
| + should(playbackTime, 'playbackTime').beCloseTo(expectedTime, { |
| + threshold: allowedTimeGap |
| + }); |
| -function doBufferSizeTest(size) { |
| - try { |
| - var jsnode = context.createScriptProcessor(size, 1, 1); |
| - testPassed("Successfully created ScriptProcessorNode with bufferSize = " + size + "."); |
| - } catch(e) { |
| - testFailed("Failed to create ScriptProcessorNode with bufferSize = " + size + "."); |
| - } |
| + buffer = event.outputBuffer; |
| + should(buffer.numberOfChannels, 'Number of output channels') |
| + .beEqualTo(outputChannels); |
| + should(buffer.length, 'Length of buffer').beEqualTo(bufferSize); |
| + |
| + buffer = event.inputBuffer; |
| + let bufferDataL = buffer.getChannelData(0); |
| + let bufferDataR = buffer.getChannelData(1); |
| + |
| + should(bufferDataL, 'Left channel').beConstantValueOf(-1); |
| + should(bufferDataR, 'Right channel').beConstantValueOf(1); |
| } |
| -function runTest() { |
| - if (window.testRunner) { |
| - testRunner.dumpAsText(); |
| - testRunner.waitUntilDone(); |
| - } |
| - |
| - window.jsTestIsAsync = true; |
| - |
| - // Create offline audio context. |
| - context = new OfflineAudioContext(2, renderLengthInFrames, sampleRate); |
| - |
| - try { |
| - var jsnode = context.createScriptProcessor(512, 0, 0); |
| - testFailed("Exception should be thrown when both numberOfInputChannels and numberOfOutputChannels are zero."); |
| - } catch(e) { |
| - testPassed("Exception was thrown when both numberOfInputChannels and numberOfOutputChannels are zero."); |
| - } |
| - |
| - try { |
| - var jsnode = context.createScriptProcessor(512, 1, 0); |
| - testPassed("Successfully created ScriptProcessorNode with numberOfInputChannels = 1 and numberOfOutputChannels = 0."); |
| - } catch(e) { |
| - testFailed("Exception should not be thrown when numberOfInputChannels = 1 and numberOfOutputChannels = 0."); |
| - } |
| - |
| - try { |
| - var jsnode = context.createScriptProcessor(512, 2, 0); |
| - testPassed("Successfully created ScriptProcessorNode with numberOfInputChannels = 2 and numberOfOutputChannels = 0."); |
| - } catch(e) { |
| - testFailed("Exception should not be thrown when numberOfInputChannels = 2 and numberOfOutputChannels = 0."); |
| - } |
| - |
| - try { |
| - var jsnode = context.createScriptProcessor(512, 0, 1); |
| - testPassed("Successfully created ScriptProcessorNode with numberOfInputChannels = 0 and numberOfOutputChannels = 1."); |
| - } catch(e) { |
| - testFailed("Exception should not be thrown when numberOfInputChannels = 0 and numberOfOutputChannels = 1."); |
| - } |
| - |
| - try { |
| - var jsnode = context.createScriptProcessor(512, 0, 2); |
| - testPassed("Successfully created ScriptProcessorNode with numberOfInputChannels = 0 and numberOfOutputChannels = 2."); |
| - } catch(e) { |
| - testFailed("Exception should not be thrown when numberOfInputChannels = 0 and numberOfOutputChannels = 2."); |
| - } |
| - |
| - try { |
| - var jsnode = context.createScriptProcessor(511, 1, 1); |
| - testFailed("Exception should be thrown for illegal bufferSize."); |
| - } catch(e) { |
| - testPassed("Exception was thrown for illegal bufferSize."); |
| - } |
| - |
| - doBufferSizeTest(256); |
| - doBufferSizeTest(512); |
| - doBufferSizeTest(1024); |
| - doBufferSizeTest(2048); |
| - doBufferSizeTest(4096); |
| - doBufferSizeTest(8192); |
| - doBufferSizeTest(16384); |
| - |
| - var sourceBuffer = createBuffer(context, renderLengthInFrames); |
| - |
| - var bufferSource = context.createBufferSource(); |
| - bufferSource.buffer = sourceBuffer; |
| - |
| - var jsnode = context.createScriptProcessor(bufferSize, 2, outputChannels); |
| - |
| - bufferSource.connect(jsnode); |
| - jsnode.connect(context.destination); |
| - jsnode.onaudioprocess = processAudioData; |
| - |
| - bufferSource.start(0); |
| - context.oncomplete = finishJSTest; |
| - context.startRendering(); |
| +function doBufferSizeTest(size, should) { |
| + should(() => { |
| + context.createScriptProcessor(size, 1, 1); |
| + }, 'context.createScriptProcessor(' + size + ', 1, 1)').notThrow(); |
| } |
| -runTest(); |
| +audit.define( |
| + {label: 'test', description: 'Basic ScriptProcessorNode properties'}, |
|
hongchan
2017/02/24 23:12:46
A space needed after the beginning curly brace and
|
| + (task, should) => { |
| + // Create offline audio context. |
| + context = new OfflineAudioContext(2, renderLengthInFrames, sampleRate); |
| + |
| + should(() => { |
| + context.createScriptProcessor(512, 0, 0); |
| + }, 'context.createScriptProcessor(512, 0, 0)').throw(); |
| + |
| + should(() => { |
| + context.createScriptProcessor(512, 1, 0); |
| + }, 'context.createScriptProcessor(512, 1, 0)').notThrow(); |
| + |
| + should(() => { |
| + context.createScriptProcessor(512, 2, 0); |
| + }, 'context.createScriptProcessor(512, 2, 0)').notThrow(); |
| + |
| + should(() => { |
| + context.createScriptProcessor(512, 0, 1); |
| + }, 'context.createScriptProcessor(512, 0, 1)').notThrow(); |
| + |
| + should(() => { |
| + context.createScriptProcessor(512, 0, 2); |
| + }, 'context.createScriptProcessor(512, 0, 2)').notThrow(); |
| + should(() => { |
| + context.createScriptProcessor(511, 1, 1); |
| + }, 'context.createScriptProcessor(511, 1, 1)').throw(); |
| + |
| + doBufferSizeTest(256, should); |
| + doBufferSizeTest(512, should); |
| + doBufferSizeTest(1024, should); |
| + doBufferSizeTest(2048, should); |
| + doBufferSizeTest(4096, should); |
| + doBufferSizeTest(8192, should); |
| + doBufferSizeTest(16384, should); |
| + |
| + let sourceBuffer = createBuffer(context, renderLengthInFrames); |
| + |
| + let bufferSource = context.createBufferSource(); |
| + bufferSource.buffer = sourceBuffer; |
| + |
| + let jsnode = context.createScriptProcessor(bufferSize, 2, outputChannels); |
| + |
| + bufferSource.connect(jsnode); |
| + jsnode.connect(context.destination); |
| + jsnode.onaudioprocess = event => { |
| + processAudioData(event, should); |
| + }; |
| + |
| + bufferSource.start(0); |
| + |
| + context.startRendering().then(() => task.done()); |
| + }); |
| +audit.run(); |
| </script> |
| </body> |