| OLD | NEW |
| 1 // There are tests for computeStatistics() located in LayoutTests/fast/harness/perftests | 1 // There are tests for computeStatistics() located in LayoutTests/fast/harness/perftests |
| 2 | 2 |
| 3 if (window.testRunner) { | 3 if (window.testRunner) { |
| 4 testRunner.waitUntilDone(); | 4 testRunner.waitUntilDone(); |
| 5 testRunner.dumpAsText(); | 5 testRunner.dumpAsText(); |
| 6 } | 6 } |
| 7 | 7 |
| 8 (function () { | 8 (function () { |
| 9 var logLines = null; | 9 var logLines = null; |
| 10 var completedIterations = -1; | 10 var completedIterations = -1; |
| (...skipping 139 matching lines...) |
| 150 return; | 150 return; |
| 151 } | 151 } |
| 152 currentTest = test; | 152 currentTest = test; |
| 153 iterationCount = test.iterationCount || (window.testRunner ? 5 : 20); | 153 iterationCount = test.iterationCount || (window.testRunner ? 5 : 20); |
| 154 if (test.warmUpCount && test.warmUpCount > 0) | 154 if (test.warmUpCount && test.warmUpCount > 0) |
| 155 completedIterations = -test.warmUpCount; | 155 completedIterations = -test.warmUpCount; |
| 156 logLines = PerfTestRunner.bufferedLog || window.testRunner ? [] : null; | 156 logLines = PerfTestRunner.bufferedLog || window.testRunner ? [] : null; |
| 157 PerfTestRunner.log("Running " + iterationCount + " times"); | 157 PerfTestRunner.log("Running " + iterationCount + " times"); |
| 158 if (test.doNotIgnoreInitialRun) | 158 if (test.doNotIgnoreInitialRun) |
| 159 completedIterations++; | 159 completedIterations++; |
| 160 | 160 if (runner) |
| 161 if (runner && test.tracingCategories && window.testRunner && | |
| 162 window.testRunner.supportTracing) { | |
| 163 window.testRunner.traceEventsToMeasure = test.traceEventsToMeasure; | |
| 164 window.testRunner.startTracing(test.tracingCategories, function() { | |
| 165 scheduleNextRun(scheduler, runner); | |
| 166 }); | |
| 167 } else if (runner) { | |
| 168 scheduleNextRun(scheduler, runner); | 161 scheduleNextRun(scheduler, runner); |
| 169 } | |
| 170 } | 162 } |
| 171 | 163 |
| 172 function scheduleNextRun(scheduler, runner) { | 164 function scheduleNextRun(scheduler, runner) { |
| 173 scheduler(function () { | 165 scheduler(function () { |
| 174 // This will be used by tools/perf/benchmarks/blink_perf.py to find | 166 // This will be used by tools/perf/benchmarks/blink_perf.py to find |
| 175 // traces during the measured runs. | 167 // traces during the measured runs. |
| 176 if (completedIterations >= 0) | 168 if (completedIterations >= 0) |
| 177 console.time("blink_perf"); | 169 console.time("blink_perf"); |
| 178 | 170 |
| 179 try { | 171 try { |
| (...skipping 46 matching lines...) |
| 226 } | 218 } |
| 227 if (logLines) | 219 if (logLines) |
| 228 logLines.forEach(logInDocument); | 220 logLines.forEach(logInDocument); |
| 229 window.scrollTo(0, document.body.offsetHeight); | 221 window.scrollTo(0, document.body.offsetHeight); |
| 230 if (currentTest.done) | 222 if (currentTest.done) |
| 231 currentTest.done(); | 223 currentTest.done(); |
| 232 } catch (exception) { | 224 } catch (exception) { |
| 233 logInDocument("Got an exception while finalizing the test with name=
" + exception.name + ", message=" + exception.message); | 225 logInDocument("Got an exception while finalizing the test with name=
" + exception.name + ", message=" + exception.message); |
| 234 } | 226 } |
| 235 | 227 |
| 236 if (window.testRunner) { | 228 if (window.testRunner) |
| 237 if (currentTest.traceEventsToMeasure && | 229 testRunner.notifyDone(); |
| 238 testRunner.supportTracing) { | |
| 239 testRunner.stopTracingAndMeasure( | |
| 240 currentTest.traceEventsToMeasure, function() { | |
| 241 testRunner.notifyDone(); | |
| 242 }); | |
| 243 } else { | |
| 244 testRunner.notifyDone(); | |
| 245 } | |
| 246 } | |
| 247 } | 230 } |
| 248 | 231 |
| 249 PerfTestRunner.prepareToMeasureValuesAsync = function (test) { | 232 PerfTestRunner.prepareToMeasureValuesAsync = function (test) { |
| 250 PerfTestRunner.unit = test.unit; | 233 PerfTestRunner.unit = test.unit; |
| 251 start(test); | 234 start(test); |
| 252 } | 235 } |
| 253 | 236 |
| 254 PerfTestRunner.measureValueAsync = function (measuredValue) { | 237 PerfTestRunner.measureValueAsync = function (measuredValue) { |
| 255 completedIterations++; | 238 completedIterations++; |
| 256 | 239 |
| 257 try { | 240 try { |
| 258 ignoreWarmUpAndLog(measuredValue); | 241 ignoreWarmUpAndLog(measuredValue); |
| 259 } catch (exception) { | 242 } catch (exception) { |
| 260 PerfTestRunner.logFatalError("Got an exception while logging the result with name=" + exception.name + ", message=" + exception.message); | 243 PerfTestRunner.logFatalError("Got an exception while logging the result with name=" + exception.name + ", message=" + exception.message); |
| 261 return; | 244 return; |
| 262 } | 245 } |
| 263 | 246 |
| 264 if (completedIterations >= iterationCount) | 247 if (completedIterations >= iterationCount) |
| 265 finish(); | 248 finish(); |
| 266 } | 249 } |
| 267 | 250 |
| 268 function addRunTestStartMarker() { | |
| 269 if (!window.testRunner || !window.testRunner.supportTracing) | |
| 270 return; | |
| 271 if (completedIterations < 0) | |
| 272 console.time('blink_perf.runTest.warmup'); | |
| 273 else | |
| 274 console.time('blink_perf.runTest'); | |
| 275 } | |
| 276 | |
| 277 function addRunTestEndMarker() { | |
| 278 if (!window.testRunner || !window.testRunner.supportTracing) | |
| 279 return; | |
| 280 if (completedIterations < 0) | |
| 281 console.timeEnd('blink_perf.runTest.warmup'); | |
| 282 else | |
| 283 console.timeEnd('blink_perf.runTest'); | |
| 284 } | |
| 285 | |
| 286 | |
| 287 PerfTestRunner.measureFrameTime = function (test) { | 251 PerfTestRunner.measureFrameTime = function (test) { |
| 288 PerfTestRunner.unit = "ms"; | 252 PerfTestRunner.unit = "ms"; |
| 289 PerfTestRunner.bufferedLog = true; | 253 PerfTestRunner.bufferedLog = true; |
| 290 test.warmUpCount = test.warmUpCount || 5; | 254 test.warmUpCount = test.warmUpCount || 5; |
| 291 test.iterationCount = test.iterationCount || 10; | 255 test.iterationCount = test.iterationCount || 10; |
| 292 // Force gc before starting the test to avoid the measured time from | 256 // Force gc before starting the test to avoid the measured time from |
| 293 // being affected by gc performance. See crbug.com/667811#c16. | 257 // being affected by gc performance. See crbug.com/667811#c16. |
| 294 PerfTestRunner.gc(); | 258 PerfTestRunner.gc(); |
| 295 start(test, requestAnimationFrame, measureFrameTimeOnce); | 259 start(test, requestAnimationFrame, measureFrameTimeOnce); |
| 296 } | 260 } |
| 297 | 261 |
| 298 var lastFrameTime = -1; | 262 var lastFrameTime = -1; |
| 299 function measureFrameTimeOnce() { | 263 function measureFrameTimeOnce() { |
| 300 if (lastFrameTime != -1) | |
| 301 addRunTestEndMarker(); | |
| 302 var now = PerfTestRunner.now(); | 264 var now = PerfTestRunner.now(); |
| 303 var result = lastFrameTime == -1 ? -1 : now - lastFrameTime; | 265 var result = lastFrameTime == -1 ? -1 : now - lastFrameTime; |
| 304 lastFrameTime = now; | 266 lastFrameTime = now; |
| 305 addRunTestStartMarker(); | |
| 306 | 267 |
| 307 var returnValue = currentTest.run(); | 268 var returnValue = currentTest.run(); |
| 308 if (returnValue - 0 === returnValue) { | 269 if (returnValue - 0 === returnValue) { |
| 309 if (returnValue < 0) | 270 if (returnValue < 0) |
| 310 PerfTestRunner.log("runFunction returned a negative value: " + r
eturnValue); | 271 PerfTestRunner.log("runFunction returned a negative value: " + r
eturnValue); |
| 311 return returnValue; | 272 return returnValue; |
| 312 } | 273 } |
| 313 | 274 |
| 314 return result; | 275 return result; |
| 315 } | 276 } |
| 316 | 277 |
| 317 PerfTestRunner.measureTime = function (test) { | 278 PerfTestRunner.measureTime = function (test) { |
| 318 PerfTestRunner.unit = "ms"; | 279 PerfTestRunner.unit = "ms"; |
| 319 PerfTestRunner.bufferedLog = true; | 280 PerfTestRunner.bufferedLog = true; |
| 320 start(test, zeroTimeoutScheduler, measureTimeOnce); | 281 start(test, zeroTimeoutScheduler, measureTimeOnce); |
| 321 } | 282 } |
| 322 | 283 |
| 323 function zeroTimeoutScheduler(task) { | 284 function zeroTimeoutScheduler(task) { |
| 324 setTimeout(task, 0); | 285 setTimeout(task, 0); |
| 325 } | 286 } |
| 326 | 287 |
| 327 function measureTimeOnce() { | 288 function measureTimeOnce() { |
| 328 // Force gc before measuring time to avoid interference between tests. | 289 // Force gc before measuring time to avoid interference between tests. |
| 329 PerfTestRunner.gc(); | 290 PerfTestRunner.gc(); |
| 330 | 291 |
| 331 var start = PerfTestRunner.now(); | 292 var start = PerfTestRunner.now(); |
| 332 addRunTestStartMarker(); | |
| 333 var returnValue = currentTest.run(); | 293 var returnValue = currentTest.run(); |
| 334 addRunTestEndMarker(); | |
| 335 var end = PerfTestRunner.now(); | 294 var end = PerfTestRunner.now(); |
| 336 | 295 |
| 337 if (returnValue - 0 === returnValue) { | 296 if (returnValue - 0 === returnValue) { |
| 338 if (returnValue < 0) | 297 if (returnValue < 0) |
| 339 PerfTestRunner.log("runFunction returned a negative value: " + returnValue); | 298 PerfTestRunner.log("runFunction returned a negative value: " + returnValue); |
| 340 return returnValue; | 299 return returnValue; |
| 341 } | 300 } |
| 342 | 301 |
| 343 return end - start; | 302 return end - start; |
| 344 } | 303 } |
| (...skipping 63 matching lines...) |
| 408 | 367 |
| 409 iframe.contentDocument.close(); | 368 iframe.contentDocument.close(); |
| 410 document.body.removeChild(iframe); | 369 document.body.removeChild(iframe); |
| 411 }; | 370 }; |
| 412 | 371 |
| 413 PerfTestRunner.measureTime(test); | 372 PerfTestRunner.measureTime(test); |
| 414 } | 373 } |
| 415 | 374 |
| 416 window.PerfTestRunner = PerfTestRunner; | 375 window.PerfTestRunner = PerfTestRunner; |
| 417 })(); | 376 })(); |
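
For reference, a minimal sketch of how a perf test page typically drives this runner. The field names (`warmUpCount`, `iterationCount`, `run`, `done`) come from the `test` object handled in the diff above; the work inside `run()` is a hypothetical placeholder.

```js
// Hypothetical usage sketch of the PerfTestRunner API shown above; not part of this change.
PerfTestRunner.measureTime({
    warmUpCount: 2,       // iterations discarded before results are logged
    iterationCount: 10,   // measured iterations (the runner supplies defaults if omitted)
    run: function () {
        // Work to be timed once per iteration. If run() returns a number,
        // measureTimeOnce() reports that value instead of the elapsed time.
        for (var i = 0; i < 1000; i++)
            document.createElement("div");
    },
    done: function () {
        // Optional cleanup invoked from finish().
    }
});
```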