OLD | NEW |
---|---|
1 // There are tests for computeStatistics() located in LayoutTests/fast/harness/perftests | 1 // There are tests for computeStatistics() located in LayoutTests/fast/harness/perftests |
2 | 2 |
3 if (window.testRunner) { | 3 if (window.testRunner) { |
4 testRunner.waitUntilDone(); | 4 testRunner.waitUntilDone(); |
5 testRunner.dumpAsText(); | 5 testRunner.dumpAsText(); |
6 } | 6 } |
7 | 7 |
8 (function () { | 8 (function () { |
9 var logLines = null; | 9 var logLines = null; |
10 var completedIterations = -1; | 10 var completedIterations = -1; |
(...skipping 127 matching lines...) | |
138 | 138 |
139 PerfTestRunner.forceLayout = function(doc) { | 139 PerfTestRunner.forceLayout = function(doc) { |
140 doc = doc || document; | 140 doc = doc || document; |
141 if (doc.body) | 141 if (doc.body) |
142 doc.body.offsetHeight; | 142 doc.body.offsetHeight; |
143 else if (doc.documentElement) | 143 else if (doc.documentElement) |
144 doc.documentElement.offsetHeight; | 144 doc.documentElement.offsetHeight; |
145 }; | 145 }; |
146 | 146 |
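For reference, forceLayout() above forces a synchronous layout by reading offsetHeight on the body (or on documentElement when there is no body), and the optional doc argument lets a test flush a different document. A minimal sketch of a hypothetical call site, assuming the test wants the layout cost inside its timed region:

    // Hypothetical usage: mutate the DOM, then flush layout immediately so the
    // cost is paid here rather than at the next natural layout flush.
    var div = document.createElement("div");
    div.textContent = "x";
    document.body.appendChild(div);
    PerfTestRunner.forceLayout();   // reads document.body.offsetHeight internally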
147 function start(test, scheduler, runner) { | 147 function start(test, scheduler, runner) { |
148 if (!test) { | 148 if (!test) { |
Xianzhu (2017/05/03 19:16:00): Add "|| !runner" because with this CL, we should a…
nednguyen (2017/05/03 19:18:51): Right.
nednguyen (2017/05/03 21:30:00): Done.
149 PerfTestRunner.logFatalError("Got a bad test object."); | 149 PerfTestRunner.logFatalError("Got a bad test object."); |
150 return; | 150 return; |
151 } | 151 } |
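Per the thread above, with this CL callers are expected to always pass a runner, so the guard presumably grows to cover both arguments. A sketch of that shape, assuming the existing error message is reused:

    function start(test, scheduler, runner) {
        if (!test || !runner) {   // also reject a missing runner, as requested above
            PerfTestRunner.logFatalError("Got a bad test object.");
            return;
        }
        // ... rest of start() as in the diff ...
    }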
152 currentTest = test; | 152 currentTest = test; |
153 | 153 |
154 if (test.tracingCategories && !test.traceEventsToMeasure) { | 154 if (test.tracingCategories && !test.traceEventsToMeasure) { |
155 PerfTestRunner.logFatalError("test's tracingCategories is " + | 155 PerfTestRunner.logFatalError("test's tracingCategories is " + |
156 "specified but test's traceEventsToMeasure is empty"); | 156 "specified but test's traceEventsToMeasure is empty"); |
157 return; | 157 return; |
158 } | 158 } |
159 | 159 |
160 if (test.traceEventsToMeasure && !test.tracingCategories) { | 160 if (test.traceEventsToMeasure && !test.tracingCategories) { |
161 PerfTestRunner.logFatalError("test's traceEventsToMeasure is " + | 161 PerfTestRunner.logFatalError("test's traceEventsToMeasure is " + |
162 "specified but test's tracingCategories is empty"); | 162 "specified but test's tracingCategories is empty"); |
163 return; | 163 return; |
164 } | 164 } |
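These two checks make tracingCategories and traceEventsToMeasure all-or-nothing: a test must specify both or neither. A sketch of a test object that would pass the checks; the category and event names are illustrative placeholders, not values taken from this CL:

    var test = {
        unit: "ms",
        iterationCount: 10,
        tracingCategories: "blink",               // placeholder category string
        traceEventsToMeasure: ["SomeTraceEvent"], // placeholder event list
        run: function () { /* work to measure */ }
    };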
165 iterationCount = test.iterationCount || (window.testRunner ? 5 : 20); | 165 iterationCount = test.iterationCount || (window.testRunner ? 5 : 20); |
166 if (test.warmUpCount && test.warmUpCount > 0) | 166 if (test.warmUpCount && test.warmUpCount > 0) |
167 completedIterations = -test.warmUpCount; | 167 completedIterations = -test.warmUpCount; |
168 logLines = PerfTestRunner.bufferedLog || window.testRunner ? [] : null; | 168 logLines = PerfTestRunner.bufferedLog || window.testRunner ? [] : null; |
169 PerfTestRunner.log("Running " + iterationCount + " times"); | 169 PerfTestRunner.log("Running " + iterationCount + " times"); |
170 if (test.doNotIgnoreInitialRun) | 170 if (test.doNotIgnoreInitialRun) |
171 completedIterations++; | 171 completedIterations++; |
172 | 172 |
| 173 if (!scheduler) { |
| 174 runner(); |
| 175 return; |
| 176 } |
| 177 |
173 if (runner && test.tracingCategories && window.testRunner && | 178 if (runner && test.tracingCategories && window.testRunner && |
174 window.testRunner.supportTracing) { | 179 window.testRunner.supportTracing) { |
175 window.testRunner.traceEventsToMeasure = test.traceEventsToMeasure; | 180 window.testRunner.traceEventsToMeasure = test.traceEventsToMeasure; |
176 window.testRunner.startTracing(test.tracingCategories, function() { | 181 window.testRunner.startTracing(test.tracingCategories, function() { |
177 scheduleNextRun(scheduler, runner); | 182 scheduleNextRun(scheduler, runner); |
178 }); | 183 }); |
179 } else if (runner) { | 184 } else if (runner) { |
180 if (test.tracingCategories && !(window.testRuner && | 185 if (test.tracingCategories && !(window.testRuner && |
Xianzhu (2017/05/03 19:16:00): There was a typo "testRuner" here. How about chan…
nednguyen (2017/05/03 19:18:50): Thanks, I will make a separate CL for this.
181 window.testRunner.supportTracing)) { | 186 window.testRunner.supportTracing)) { |
182 PerfTestRunner.log("Tracing based metrics are specified but " + | 187 PerfTestRunner.log("Tracing based metrics are specified but " + |
183 "tracing is not supported on this platform. To get those " + | 188 "tracing is not supported on this platform. To get those " + |
184 "metrics from this test, you can run the test using " + | 189 "metrics from this test, you can run the test using " + |
185 "tools/perf/run_benchmarks script."); | 190 "tools/perf/run_benchmarks script."); |
186 } | 191 } |
187 scheduleNextRun(scheduler, runner); | 192 scheduleNextRun(scheduler, runner); |
188 } | 193 } |
189 } | 194 } |
190 | 195 |
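Condensing the tail of start() after this change: a missing scheduler now short-circuits to a direct, synchronous call of the runner, while the scheduler-driven paths keep their tracing logic. A paraphrased sketch, assuming the "|| !runner" guard above and the "testRunner" spelling that the thread says will be corrected in a separate CL:

    if (!scheduler) {
        runner();                // no scheduler: run the test body directly
        return;
    }
    if (test.tracingCategories && window.testRunner && window.testRunner.supportTracing) {
        // Tracing-capable harness: start tracing, then schedule the first run.
        window.testRunner.traceEventsToMeasure = test.traceEventsToMeasure;
        window.testRunner.startTracing(test.tracingCategories, function () {
            scheduleNextRun(scheduler, runner);
        });
    } else {
        if (test.tracingCategories) {
            // Tracing was requested but this harness cannot honor it; fall back
            // and point at tools/perf/run_benchmarks, as the log message does.
            PerfTestRunner.log("Tracing based metrics are specified but tracing " +
                "is not supported on this platform.");
        }
        scheduleNextRun(scheduler, runner);
    }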
(...skipping 69 matching lines...) | |
260 testRunner.notifyDone(); | 265 testRunner.notifyDone(); |
261 }); | 266 }); |
262 } else { | 267 } else { |
263 testRunner.notifyDone(); | 268 testRunner.notifyDone(); |
264 } | 269 } |
265 } | 270 } |
266 } | 271 } |
267 | 272 |
268 PerfTestRunner.prepareToMeasureValuesAsync = function (test) { | 273 PerfTestRunner.prepareToMeasureValuesAsync = function (test) { |
269 PerfTestRunner.unit = test.unit; | 274 PerfTestRunner.unit = test.unit; |
270 start(test); | 275 start(test, undefined, test.run); |
Xianzhu (2017/05/03 17:14:25): How about: start(test, function(task) { task();…
nednguyen (2017/05/03 18:21:52): I tried this first, but doing so lead to invalid t…
Xianzhu (2017/05/03 19:16:00): I see the problem. scheduleNextRun() doesn't suppo…
nednguyen (2017/05/03 19:18:50): Acknowledged.
271 } | 276 } |
272 | 277 |
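With the change at old line 270 / new line 275, prepareToMeasureValuesAsync() goes through start() with no scheduler, so test.run is invoked directly and the test reports its own values via measureValueAsync(). A hedged usage sketch; the timing logic inside run is illustrative, and how subsequent iterations are scheduled is outside the lines shown here:

    PerfTestRunner.prepareToMeasureValuesAsync({
        unit: "ms",
        run: function () {
            var started = Date.now();
            requestAnimationFrame(function () {
                // Report one measured value for this iteration; the runner
                // handles warm-up skipping and iteration counting.
                PerfTestRunner.measureValueAsync(Date.now() - started);
            });
        }
    });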
273 PerfTestRunner.measureValueAsync = function (measuredValue) { | 278 PerfTestRunner.measureValueAsync = function (measuredValue) { |
274 completedIterations++; | 279 completedIterations++; |
275 | 280 |
276 try { | 281 try { |
277 ignoreWarmUpAndLog(measuredValue); | 282 ignoreWarmUpAndLog(measuredValue); |
278 } catch (exception) { | 283 } catch (exception) { |
279 PerfTestRunner.logFatalError("Got an exception while logging the result with name=" + exception.name + ", message=" + exception.message); | 284 PerfTestRunner.logFatalError("Got an exception while logging the result with name=" + exception.name + ", message=" + exception.message); |
280 return; | 285 return; |
(...skipping 146 matching lines...) | |
427 | 432 |
428 iframe.contentDocument.close(); | 433 iframe.contentDocument.close(); |
429 document.body.removeChild(iframe); | 434 document.body.removeChild(iframe); |
430 }; | 435 }; |
431 | 436 |
432 PerfTestRunner.measureTime(test); | 437 PerfTestRunner.measureTime(test); |
433 } | 438 } |
434 | 439 |
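measureTime() at old line 432 / new line 437 is the synchronous counterpart, where the runner presumably times each call to run() itself. A minimal sketch of a hypothetical caller; the loop body is only stand-in work:

    PerfTestRunner.measureTime({
        run: function () {
            // Stand-in workload: repeatedly force layout so there is something
            // deterministic for the runner to time.
            for (var i = 0; i < 1000; i++)
                document.body.offsetWidth;
        }
    });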
435 window.PerfTestRunner = PerfTestRunner; | 440 window.PerfTestRunner = PerfTestRunner; |
436 })(); | 441 })(); |