OLD | NEW |
1 // There are tests for computeStatistics() located in LayoutTests/fast/harness/p
erftests | 1 // There are tests for computeStatistics() located in LayoutTests/fast/harness/p
erftests |
2 | 2 |
3 if (window.testRunner) { | 3 if (window.testRunner) { |
4 testRunner.waitUntilDone(); | 4 testRunner.waitUntilDone(); |
5 testRunner.dumpAsText(); | 5 testRunner.dumpAsText(); |
6 } | 6 } |
7 | 7 |
8 (function () { | 8 (function () { |
9 var logLines = null; | 9 var logLines = null; |
10 var completedIterations = -1; | 10 var completedIterations = -1; |
(...skipping 139 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
150 return; | 150 return; |
151 } | 151 } |
152 currentTest = test; | 152 currentTest = test; |
153 iterationCount = test.iterationCount || (window.testRunner ? 5 : 20); | 153 iterationCount = test.iterationCount || (window.testRunner ? 5 : 20); |
154 if (test.warmUpCount && test.warmUpCount > 0) | 154 if (test.warmUpCount && test.warmUpCount > 0) |
155 completedIterations = -test.warmUpCount; | 155 completedIterations = -test.warmUpCount; |
156 logLines = PerfTestRunner.bufferedLog || window.testRunner ? [] : null; | 156 logLines = PerfTestRunner.bufferedLog || window.testRunner ? [] : null; |
157 PerfTestRunner.log("Running " + iterationCount + " times"); | 157 PerfTestRunner.log("Running " + iterationCount + " times"); |
158 if (test.doNotIgnoreInitialRun) | 158 if (test.doNotIgnoreInitialRun) |
159 completedIterations++; | 159 completedIterations++; |
160 if (runner) | 160 |
| 161 if (runner && test.tracingCategories && window.testRunner && |
| 162 window.testRunner.supportTracing) { |
| 163 window.testRunner.traceEventsToMeasure = test.traceEventsToMeasure; |
| 164 window.testRunner.startTracing(test.tracingCategories, function() { |
| 165 scheduleNextRun(scheduler, runner); |
| 166 }); |
| 167 } else if (runner) { |
161 scheduleNextRun(scheduler, runner); | 168 scheduleNextRun(scheduler, runner); |
| 169 } |
162 } | 170 } |
163 | 171 |
164 function scheduleNextRun(scheduler, runner) { | 172 function scheduleNextRun(scheduler, runner) { |
165 scheduler(function () { | 173 scheduler(function () { |
166 // This will be used by tools/perf/benchmarks/blink_perf.py to find | 174 // This will be used by tools/perf/benchmarks/blink_perf.py to find |
167 // traces during the measured runs. | 175 // traces during the measured runs. |
168 if (completedIterations >= 0) | 176 if (completedIterations >= 0) |
169 console.time("blink_perf"); | 177 console.time("blink_perf"); |
170 | 178 |
171 try { | 179 try { |
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
218 } | 226 } |
219 if (logLines) | 227 if (logLines) |
220 logLines.forEach(logInDocument); | 228 logLines.forEach(logInDocument); |
221 window.scrollTo(0, document.body.offsetHeight); | 229 window.scrollTo(0, document.body.offsetHeight); |
222 if (currentTest.done) | 230 if (currentTest.done) |
223 currentTest.done(); | 231 currentTest.done(); |
224 } catch (exception) { | 232 } catch (exception) { |
225 logInDocument("Got an exception while finalizing the test with name=
" + exception.name + ", message=" + exception.message); | 233 logInDocument("Got an exception while finalizing the test with name=
" + exception.name + ", message=" + exception.message); |
226 } | 234 } |
227 | 235 |
228 if (window.testRunner) | 236 if (window.testRunner) { |
229 testRunner.notifyDone(); | 237 if (currentTest.traceEventsToMeasure && |
| 238 testRunner.supportTracing) { |
| 239 testRunner.stopTracingAndMeasure( |
| 240 currentTest.traceEventsToMeasure, function() { |
| 241 testRunner.notifyDone(); |
| 242 }); |
| 243 } else { |
| 244 testRunner.notifyDone(); |
| 245 } |
| 246 } |
230 } | 247 } |
231 | 248 |
// Sets up a test that reports its samples asynchronously: the page calls
// PerfTestRunner.measureValueAsync() once per measured value instead of
// being driven by a scheduler/runner pair.
PerfTestRunner.prepareToMeasureValuesAsync = function (test) {
    PerfTestRunner.unit = test.unit;
    start(test);
}
236 | 253 |
// Records one asynchronously reported sample. Warm-up samples are handled
// by ignoreWarmUpAndLog(); once iterationCount samples have been counted
// the test is finalized via finish().
PerfTestRunner.measureValueAsync = function (measuredValue) {
    completedIterations++;

    try {
        ignoreWarmUpAndLog(measuredValue);
    } catch (exception) {
        PerfTestRunner.logFatalError("Got an exception while logging the result with name=" + exception.name + ", message=" + exception.message);
        return;
    }

    // Keep collecting until the requested number of iterations is reached.
    if (completedIterations < iterationCount)
        return;
    finish();
}
250 | 267 |
// Opens a console.time() marker at the start of a run so trace-consuming
// tools can locate per-run events. Warm-up runs (completedIterations < 0)
// use a distinct ".warmup" marker name. No-op unless the test runner
// supports tracing.
function addRunTestStartMarker() {
    if (!window.testRunner || !window.testRunner.supportTracing)
        return;
    var markerName = completedIterations < 0 ?
        'blink_perf.runTest.warmup' : 'blink_perf.runTest';
    console.time(markerName);
}
| 276 |
// Closes the console.time() marker opened by addRunTestStartMarker(),
// matching its warm-up/measured marker-name choice. No-op unless the
// test runner supports tracing.
function addRunTestEndMarker() {
    if (!window.testRunner || !window.testRunner.supportTracing)
        return;
    var markerName = completedIterations < 0 ?
        'blink_perf.runTest.warmup' : 'blink_perf.runTest';
    console.timeEnd(markerName);
}
| 285 |
| 286 |
// Measures per-frame time: drives the test with requestAnimationFrame and
// reports the delta between consecutive frames in milliseconds.
PerfTestRunner.measureFrameTime = function (test) {
    PerfTestRunner.bufferedLog = true;
    PerfTestRunner.unit = "ms";
    if (!test.warmUpCount)
        test.warmUpCount = 5;
    if (!test.iterationCount)
        test.iterationCount = 10;
    // Force gc before starting the test so the measured time is not
    // affected by gc performance. See crbug.com/667811#c16.
    PerfTestRunner.gc();
    start(test, requestAnimationFrame, measureFrameTimeOnce);
}
261 | 297 |
// Timestamp of the previous frame; -1 means no frame has run yet.
var lastFrameTime = -1;

// One requestAnimationFrame tick of a measureFrameTime() test. Returns the
// elapsed time since the previous tick, or -1 on the very first tick (when
// there is no previous timestamp to diff against). A numeric value returned
// by the test's run() overrides the measured frame time.
function measureFrameTimeOnce() {
    // Close the trace marker opened on the previous tick, if any.
    // NOTE: strict comparisons; lastFrameTime is always a number so this is
    // behavior-identical to the previous loose != / ==.
    if (lastFrameTime !== -1)
        addRunTestEndMarker();
    var now = PerfTestRunner.now();
    var result = lastFrameTime === -1 ? -1 : now - lastFrameTime;
    lastFrameTime = now;
    addRunTestStartMarker();

    var returnValue = currentTest.run();
    // `x - 0 === x` is a self-number check: true only for non-NaN numbers.
    if (returnValue - 0 === returnValue) {
        if (returnValue < 0)
            PerfTestRunner.log("runFunction returned a negative value: " + returnValue);
        return returnValue;
    }

    return result;
}
277 | 316 |
// Measures the wall-clock time of one synchronous test.run() invocation
// per iteration, scheduled through a zero-delay timeout so the event loop
// can turn between iterations.
PerfTestRunner.measureTime = function (test) {
    PerfTestRunner.bufferedLog = true;
    PerfTestRunner.unit = "ms";
    start(test, zeroTimeoutScheduler, measureTimeOnce);
}
283 | 322 |
// Scheduler used by measureTime(): queues the task with a zero-delay
// timeout so it runs on a fresh event-loop turn.
function zeroTimeoutScheduler(task) {
    setTimeout(function () { task(); }, 0);
}
287 | 326 |
// One iteration of a measureTime() test: runs currentTest.run() bracketed
// by trace markers and returns the elapsed wall-clock time, unless run()
// itself returns a number, which then overrides the measurement.
function measureTimeOnce() {
    // Force gc before measuring time to avoid interference between tests.
    PerfTestRunner.gc();

    // Renamed from `start`/`end` to avoid shadowing the file's start().
    var startTime = PerfTestRunner.now();
    addRunTestStartMarker();
    var returnValue = currentTest.run();
    addRunTestEndMarker();
    var endTime = PerfTestRunner.now();

    // `x - 0 === x` is a self-number check: true only for non-NaN numbers.
    if (returnValue - 0 === returnValue) {
        if (returnValue < 0)
            PerfTestRunner.log("runFunction returned a negative value: " + returnValue);
        return returnValue;
    }

    return endTime - startTime;
}
(...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
367 | 408 |
368 iframe.contentDocument.close(); | 409 iframe.contentDocument.close(); |
369 document.body.removeChild(iframe); | 410 document.body.removeChild(iframe); |
370 }; | 411 }; |
371 | 412 |
372 PerfTestRunner.measureTime(test); | 413 PerfTestRunner.measureTime(test); |
373 } | 414 } |
374 | 415 |
375 window.PerfTestRunner = PerfTestRunner; | 416 window.PerfTestRunner = PerfTestRunner; |
376 })(); | 417 })(); |
OLD | NEW |