Index: LayoutTests/webaudio/scriptprocessornode.html
diff --git a/LayoutTests/webaudio/scriptprocessornode.html b/LayoutTests/webaudio/scriptprocessornode.html
index c65ddc6fa62754c66f1640064280e7d2c6bea9f2..4883400c986f766a763d60405166abca9403cfec 100644
--- a/LayoutTests/webaudio/scriptprocessornode.html
+++ b/LayoutTests/webaudio/scriptprocessornode.html
@@ -46,18 +46,10 @@ function processAudioData(event) {
     var allowedTimeGap = 0.0000001;
 
     // There may be a little time gap which is from different thread operation
-    // between currentTime when main thread fires onaudioprocess() and currenTime when read in JS
-    // since currentTime is continuously increasing on audio thread. And caching of the currentTime
-    // can cause playbackTime to be one block behind. So allow for that.
+    // between currentTime when main thread fires onaudioprocess() and currentTime when read in JS
+    // since currentTime is continuously increasing on audio thread.
 
-    var closeEnough = Math.abs(playbackTime - expectedTime) <= allowedTimeGap;
-    closeEnough = closeEnough || (Math.abs(playbackTime - (expectedTime - 128 / context.sampleRate)) <= allowedTimeGap);
-
-    if (!closeEnough) {
-        testFailed("playbackTime should be within " + allowedTimeGap + " of either "
-            + expectedTime + " or " + (expectedTime - 128 / context.sampleRate)
-            + ". Was " + playbackTime);
-    }
+    shouldBeCloseTo("playbackTime", expectedTime, allowedTimeGap, true);
 
     buffer = event.outputBuffer;
     if (buffer.numberOfChannels != outputChannels)
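The replacement assertion hands the tolerance comparison to the shared js-test helper shouldBeCloseTo(); the trailing "true" argument presumably runs the check quietly (no per-call PASS line). As a rough, illustrative sketch only (the real helper evaluates its first argument as a string and handles its own pass/fail reporting), the comparison it is expected to perform looks like this:

    // Illustrative only: approximate check behind
    // shouldBeCloseTo("playbackTime", expectedTime, allowedTimeGap, true).
    // isCloseTo() is a hypothetical stand-in, not part of js-test.
    function isCloseTo(actual, target, tolerance) {
        // Pass when the measured value is within +/- tolerance of the target.
        return Math.abs(actual - target) <= tolerance;
    }

    // e.g. this step of the test passes when
    // isCloseTo(playbackTime, expectedTime, allowedTimeGap) is true.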