Index: third_party/WebKit/LayoutTests/webaudio/resources/audioparam-testing.js
diff --git a/third_party/WebKit/LayoutTests/webaudio/resources/audioparam-testing.js b/third_party/WebKit/LayoutTests/webaudio/resources/audioparam-testing.js
index b0461fc3a29421e27457719528c231ac90e39112..2132217c8823b4f266b28f2d9f6d10877d16b3fb 100644
--- a/third_party/WebKit/LayoutTests/webaudio/resources/audioparam-testing.js
+++ b/third_party/WebKit/LayoutTests/webaudio/resources/audioparam-testing.js
@@ -219,7 +219,7 @@ function valueUpdate(timeIntervalIndex)
}
// Compare a section of the rendered data against our expected signal.
-function comparePartialSignals(rendered, expectedFunction, startTime, endTime, valueInfo, sampleRate, errorMetric)
+function comparePartialSignals(should, rendered, expectedFunction, startTime, endTime, valueInfo, sampleRate, errorMetric)
{
var startSample = timeToSampleFrame(startTime, sampleRate);
var expected = expectedFunction(startTime, endTime, valueInfo.startValue, valueInfo.endValue, sampleRate, timeConstant);
@@ -234,13 +234,19 @@ function comparePartialSignals(rendered, expectedFunction, startTime, endTime, v
if (!isValidNumber(rendered[startSample + k])) {
maxError = Infinity;
maxErrorIndex = startSample + k;
- testFailed("NaN or infinity for rendered data at " + maxErrorIndex);
+ //testFailed("NaN or infinity for rendered data at " + maxErrorIndex);
hongchan
2017/01/30 17:38:04
Remove this line.
+ should(!isValidNumber(rendered[startSample + k]),
hongchan
2017/01/30 17:38:04
Can this be |isValidNumber()| then |beTrue()|?
Sa
Raymond Toy
2017/01/30 18:07:02
I thought it would be better to keep the test cond
+ "NaN or infinity for rendered data at " + maxErrorIndex) |
+ .beFalse(); |
break; |
} |
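For context on the review exchange above, the two assertion styles under discussion would look roughly as follows. This is only a sketch: the description strings are illustrative, and beTrue() is the matcher hongchan suggests rather than anything exercised by this patch.

    // Style kept in the patch: pass the negated validity check and assert false.
    should(!isValidNumber(rendered[startSample + k]),
           "NaN or infinity for rendered data at " + maxErrorIndex)
        .beFalse();

    // Style suggested in review: pass the check directly and assert true
    // (assumes the harness provides a beTrue() matcher).
    should(isValidNumber(rendered[startSample + k]),
           "rendered[" + (startSample + k) + "] is a valid number")
        .beTrue();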
if (!isValidNumber(expected[k])) {
maxError = Infinity;
maxErrorIndex = startSample + k;
- testFailed("Nan or infinity for reference data at " + maxErrorIndex);
+ //testFailed("Nan or infinity for reference data at " + maxErrorIndex);
hongchan
2017/01/30 17:38:04
Ditto.
+ should(!isValidNumber(expected[k]),
+ "NaN or infinity for reference data at " + maxErrorIndex)
+ .beFalse();
break;
}
var error = Math.abs(errorMetric(rendered[startSample + k], expected[k]));
@@ -256,7 +262,7 @@ function comparePartialSignals(rendered, expectedFunction, startTime, endTime, v
// Find the discontinuities in the data and compare the locations of the discontinuities with the
// times that define the time intervals. There is a discontinuity if the difference between
// successive samples exceeds the threshold.
-function verifyDiscontinuities(values, times, threshold)
+function verifyDiscontinuities(should, values, times, threshold)
{
var n = values.length;
var success = true;
@@ -281,7 +287,10 @@ function verifyDiscontinuities(values, times, threshold)
// is too low, every sample frame looks like a discontinuity.
if (breaks.length >= numberOfTests) {
testCount = numberOfTests - 1;
- testFailed("Found more discontinuities (" + breaks.length + ") than expected. Only comparing first " + testCount + "discontinuities.");
+ //testFailed("Found more discontinuities (" + breaks.length + ") than expected. Only comparing first " + testCount + "discontinuities.");
+ should(breaks.length,
+ "Number of discontinuities")
+ .beLessThan(numberOfTests);
success = false;
} else {
testCount = breaks.length;
@@ -294,20 +303,30 @@ function verifyDiscontinuities(values, times, threshold)
if (breaks[k] != expectedSampleFrame) {
success = false;
++badLocations;
- testFailed("Expected discontinuity at " + expectedSampleFrame + " but got " + breaks[k]);
+ //testFailed("Expected discontinuity at " + expectedSampleFrame + " but got " + breaks[k]);
+ should(breaks[k],
+ "Discontinuity at index")
+ .beEqualTo(expectedSampleFrame);
}
}
if (badLocations) {
- testFailed(badLocations + " discontinuities at incorrect locations");
+ //testFailed(badLocations + " discontinuities at incorrect locations");
+ should(badLocations, "Number of discontinuities at incorrect locations")
+ .beEqualTo(0);
success = false;
} else {
+ /*
if (breaks.length == numberOfTests - 1) {
testPassed("All " + numberOfTests + " tests started and ended at the correct time.");
} else {
testFailed("Found " + breaks.length + " discontinuities but expected " + (numberOfTests - 1));
success = false;
}
+ */
+ should(breaks.length + 1,
+ "Number of tests started and ended at the correct time")
+ .beEqualTo(numberOfTests);
}
return success;
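The detection loop referred to by the function comment above ("There is a discontinuity if the difference between successive samples exceeds the threshold") lies outside this hunk. A minimal sketch of that rule, under the stated assumption about which index gets recorded, would be:

    // Mark sample frame i as a discontinuity when it jumps by more than
    // |threshold| relative to the previous frame. (Whether the actual
    // implementation records i or i - 1 is not visible in this diff.)
    var breaks = [];
    for (var i = 1; i < values.length; ++i) {
        if (Math.abs(values[i] - values[i - 1]) > threshold)
            breaks.push(i);
    }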
@@ -327,7 +346,7 @@ function verifyDiscontinuities(values, times, threshold)
// values of each interval.
//
// breakThreshold - threshold to use for determining discontinuities.
-function compareSignals(testName, maxError, renderedData, expectedFunction, timeValueInfo, breakThreshold, errorMetric)
+function compareSignals(should, testName, maxError, renderedData, expectedFunction, timeValueInfo, breakThreshold, errorMetric)
{
var success = true;
var failedTestCount = 0;
@@ -336,13 +355,14 @@ function compareSignals(testName, maxError, renderedData, expectedFunction, time
var n = values.length;
var expectedSignal = [];
- success = verifyDiscontinuities(renderedData, times, breakThreshold);
+ success = verifyDiscontinuities(should, renderedData, times, breakThreshold);
for (var k = 0; k < n; ++k) {
- var result = comparePartialSignals(renderedData, expectedFunction, times[k], times[k + 1], values[k], sampleRate, errorMetric);
+ var result = comparePartialSignals(should, renderedData, expectedFunction, times[k], times[k + 1], values[k], sampleRate, errorMetric);
expectedSignal = expectedSignal.concat(Array.prototype.slice.call(result.expected));
+ /*
hongchan
2017/01/30 17:38:04
Remove commented line.
if (result.maxError > maxError) {
var offset = result.index + timeToSampleFrame(times[k], sampleRate);
testFailed("Incorrect value for test " + k + ". Max error = " + result.maxError
@@ -351,20 +371,31 @@ function compareSignals(testName, maxError, renderedData, expectedFunction, time
+ ", expected = " + expectedSignal[offset] + ".");
++failedTestCount;
}
+ */
+ should(result.maxError,
+ "Max error for test " + k + " at offset " + (result.index + timeToSampleFrame(times[k], sampleRate)))
+ .beLessThanOrEqualTo(maxError);
}
+ /*
hongchan
2017/01/30 17:38:04
Ditto.
if (failedTestCount) {
testFailed(failedTestCount + " tests failed out of " + n);
success = false;
} else {
testPassed("All " + n + " tests passed within an acceptable relative tolerance of " + maxError + ".");
}
+ */
+ should(failedTestCount,
+ "Number of failed tests with an acceptable relative tolerance of " + maxError)
+ .beEqualTo(0);
+ /*
hongchan
2017/01/30 17:38:04
Ditto.
if (success) {
testPassed("AudioParam " + testName + " test passed.");
} else {
testFailed("AudioParam " + testName + " test failed.");
}
+ */
}
// Create a function to test the rendered data with the reference data.
@@ -379,7 +410,7 @@ function compareSignals(testName, maxError, renderedData, expectedFunction, time
// jumpThreshold - optional parameter that specifies the threshold to use for detecting
// discontinuities. If not specified, defaults to discontinuityThreshold.
//
-function checkResultFunction(testName, error, referenceFunction, jumpThreshold, errorMetric)
+function checkResultFunction(task, should, testName, error, referenceFunction, jumpThreshold, errorMetric)
{
return function(event) {
var buffer = event.renderedBuffer;
@@ -393,9 +424,8 @@ function checkResultFunction(testName, error, referenceFunction, jumpThreshold,
threshold = jumpThreshold;
}
- compareSignals(testName, error, renderedData, referenceFunction, timeValueInfo, threshold, errorMetric);
-
- finishJSTest();
+ compareSignals(should, testName, error, renderedData, referenceFunction, timeValueInfo, threshold, errorMetric);
+ task.done();
}
}
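The closure returned here becomes the offline context's completion handler (see the createAudioGraphAndTest hunk below). Roughly, the wiring looks like this; startRendering() is assumed from typical OfflineAudioContext usage and is not part of this hunk:

    context.oncomplete = checkResultFunction(task, should, testName, maxError,
                                             referenceFunction, jumpThreshold,
                                             errorMetric);
    // When rendering completes, the handler runs compareSignals() and then
    // calls task.done(), replacing the old finishJSTest() call.
    context.startRendering();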
@@ -459,15 +489,8 @@ function doAutomation(numberOfTests, initialValue, setValueFunction, automationF
// jumpThreshold - optional parameter that specifies the threshold to use for detecting
// discontinuities. If not specified, defaults to discontinuityThreshold.
//
-function createAudioGraphAndTest(numberOfTests, initialValue, setValueFunction, automationFunction, testName, maxError, referenceFunction, jumpThreshold, errorMetric)
+function createAudioGraphAndTest(task, should, numberOfTests, initialValue, setValueFunction, automationFunction, testName, maxError, referenceFunction, jumpThreshold, errorMetric)
{
- if (window.testRunner) {
- testRunner.dumpAsText();
- testRunner.waitUntilDone();
- }
-
- window.jsTestIsAsync = true;
-
// Create offline audio context.
context = new OfflineAudioContext(2, renderLength(numberOfTests), sampleRate);
var constantBuffer = createConstantBuffer(context, renderLength(numberOfTests), 1);
@@ -496,7 +519,7 @@ function createAudioGraphAndTest(numberOfTests, initialValue, setValueFunction,
automationFunction);
bufferSource.start(0);
- context.oncomplete = checkResultFunction(testName,
+ context.oncomplete = checkResultFunction(task, should, testName,
maxError,
referenceFunction,
jumpThreshold,
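With the signature change above, callers are expected to pass in the task and should objects they receive from the Audit framework. A hypothetical call site might look like the following sketch; audit.define()/audit.run() and all argument names here are illustrative assumptions, not part of this patch:

    audit.define("automation-test", function (task, should) {
        createAudioGraphAndTest(task, should, numberOfTests, initialValue,
                                setValueFunction, automationFunction,
                                "linearRampToValueAtTime()", maxError,
                                referenceFunction);
        // task.done() is called from checkResultFunction's oncomplete
        // handler once the rendered output has been verified.
    });
    audit.run();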