Chromium Code Reviews

Side by Side Diff: utils/tests/string_encoding/benchmark_runner.dart

Issue 68563004: Move unicode tests to utf package. (Closed) Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: Simplify test. Created 7 years, 1 month ago
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file.
4
5 part of BenchmarkTests;
6
7 /**
8 * The results of a single block of tests (count times run, overall time).
9 */
10 class BlockSample {
11 BlockSample(this.count, this.durationNanos);
12 int count;
13 int durationNanos;
14
15 static int _totalCount(List<BlockSample> samples) =>
16 _sum(samples, int _(BlockSample s) => s.count);
17
18 static int _totalTime(List<BlockSample> samples) =>
19 _sum(samples, int _(BlockSample s) => s.durationNanos);
20
21 static BlockSample _select(List<BlockSample> samples,
22 BlockSample selector(BlockSample a, BlockSample b)) {
23 BlockSample r = null;
24 for (BlockSample s in samples) {
25 r = (r == null) ? s : selector(r, s);
26 }
27 return r;
28 }
29
30 static int _sum(List<BlockSample> samples, int extract(BlockSample s)) {
31 int total = 0;
32 for (BlockSample s in samples) {
33 total += extract(s);
34 }
35 return total;
36 }
37 }
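
A BlockSample simply pairs an iteration count with the elapsed time for that block, and the static helpers aggregate a list of samples. A minimal sketch with hypothetical numbers (the helpers are library-private, so this assumes code inside the same BenchmarkTests library):

  // Two measured blocks: 10 runs in 5000 ns, 20 runs in 9000 ns (made-up values).
  var samples = [new BlockSample(10, 5000), new BlockSample(20, 9000)];
  int totalRuns = BlockSample._totalCount(samples);  // 30
  int totalNanos = BlockSample._totalTime(samples);  // 14000
  int meanNanos = totalNanos ~/ totalRuns;           // 466 ns per iteration
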
38
39 /**
40 * Uses sample data to build a performance model for a test. The model is
41 * constructed from a set of sample results and produces a simple
42 * predictive estimate of the execution time of future requests, using
43 * a least-squares linear fit.
44 */
45 class PerformanceModel {
46 PerformanceModel.calculate(List<BlockSample> source) {
47 if (0 == source.length) {
48 throw "Missing data exception";
49 } else if (1 == source.length) {
50 overheadNanos = 0;
51 perRequestNanos = source[0].durationNanos ~/ source[0].count;
52 } else {
53 double n = source.length.toDouble();
54 double sumY = BlockSample._totalTime(source).toDouble();
55 double sumXSquared = BlockSample._sum(source,
56 int _(BlockSample s) => s.count * s.count).toDouble();
57 double sumX = BlockSample._totalCount(source).toDouble();
58 double sumXY = BlockSample._sum(source,
59 int _(BlockSample s) => s.durationNanos * s.count).toDouble();
60
61 overheadNanos =
62 ((((sumY * sumXSquared) - (sumX * sumXY)) /
63 ((n * sumXSquared) - (sumX * sumX))) / source.length).toInt();
64
65 perRequestNanos =
66 (((n * sumXY) - (sumX * sumY)) /
67 ((n * sumXSquared) - (sumX * sumX))).toInt();
68 }
69 }
70
71 bool isValid() => overheadNanos >= 0 && perRequestNanos >= 0;
72
73 int overheadNanos;
74 int perRequestNanos;
75 int repsFor(int targetDurationNanos, [int blocksize = -1]) {
76 if (blocksize <= 0) {
77 return ((targetDurationNanos - overheadNanos) / perRequestNanos).toInt();
78 } else {
79 int blockTime = overheadNanos + (blocksize * perRequestNanos);
80 int fullBlocks = targetDurationNanos ~/ blockTime;
81 int extraReps =
82 ((targetDurationNanos - (fullBlocks * blockTime)) - overheadNanos)
83 ~/ perRequestNanos;
84 return ((fullBlocks * blocksize) + extraReps).toInt();
85 }
86 }
87 }
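
For reference, the calculate constructor above fits durationNanos ≈ overhead + perRequest · count by ordinary least squares, with x_i the count and y_i the durationNanos of the i-th sample. The closed forms it evaluates are:

  \text{perRequestNanos} = \frac{n\sum x_i y_i - \sum x_i \sum y_i}{n\sum x_i^2 - (\sum x_i)^2},
  \qquad
  \text{intercept} = \frac{\sum y_i \sum x_i^2 - \sum x_i \sum x_i y_i}{n\sum x_i^2 - (\sum x_i)^2}

As written, the code stores the intercept divided once more by the number of samples as overheadNanos. repsFor() then inverts the model: with a positive block size, each block costs overheadNanos + blocksize * perRequestNanos, so it fills the target duration with whole blocks and tops up with extra single repetitions.
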
88
89 /**
90 * Report overall test performance
91 */
92 class TestReport {
93 TestReport(this.id, this.desc, this.warmup, this.results) {
94 spaceChar = " ".codeUnits[0];
95 }
96
97 int spaceChar;
98
99 int resultsCount() => BlockSample._totalCount(results);
100
101 int resultsNanos() => BlockSample._totalTime(results);
102
103 int resultsBestNanos() {
104 BlockSample best = bestBlock(results);
105 return best.durationNanos ~/ best.count;
106 }
107
108 int resultsMeanNanos() =>
109 BlockSample._totalTime(results) ~/ BlockSample._totalCount(results);
110
111 int resultsWorstNanos() {
112 BlockSample worst = worstBlock(results);
113 return worst.durationNanos ~/ worst.count;
114 }
115
116 int warmupBestNanos() {
117 BlockSample best = bestBlock(warmup);
118 return best.durationNanos ~/ best.count;
119 }
120
121 int warmupMeanNanos() => BlockSample._totalTime(warmup) ~/ BlockSample._totalCount(warmup);
122
123 int warmupWorstNanos() {
124 BlockSample worst = worstBlock(warmup);
125 return worst.durationNanos ~/ worst.count;
126 }
127
128 BlockSample bestBlock(List<BlockSample> samples) {
129 return BlockSample._select(samples,
130 BlockSample selector(BlockSample a, BlockSample b) {
131 return a.durationNanos <= b.durationNanos ? a : b;
132 });
133 }
134
135 BlockSample worstBlock(List<BlockSample> samples) {
136 return BlockSample._select(samples,
137 BlockSample selector(BlockSample a, BlockSample b) {
138 return a.durationNanos >= b.durationNanos ? a : b;
139 });
140 }
141
142 void printReport() {
143 String text = _leftAlign("${id}", 30);
144 String totalCount = _rightAlign(resultsCount().toString(), 10);
145 String totalDurationMs =
146 _rightAlign(_stringifyDoubleAsInt(resultsNanos() / 1E6), 6);
147 String meanDuration =
148 _rightAlign(_stringifyDoubleAsInt(resultsMeanNanos().toDouble()), 8);
149
150 print("${text} total time:${totalDurationMs} ms" +
151 " iterations:${totalCount} mean:${meanDuration} ns");
152 }
153
154 void printReportWithThroughput(int sizeBytes) {
155 String text = _leftAlign("${id}", 30);
156 String totalCount = _rightAlign(resultsCount().toString(), 10);
157 String totalDurationMs =
158 _rightAlign(_stringifyDoubleAsInt(resultsNanos() / 1E6), 6);
159 String meanDuration =
160 _rightAlign(_stringifyDoubleAsInt(resultsMeanNanos().toDouble()), 8);
161
162 int totalBytes = sizeBytes * resultsCount();
163 String mbPerSec = ((1E9 * totalBytes) /
164 (1024 * 1024 * resultsNanos())).toString();
165 print("${text} total time:${totalDurationMs} ms" +
166 " iterations:${totalCount}" +
167 " mean:${meanDuration} ns; ${mbPerSec} MB/sec");
168 }
169
170 String _leftAlign(String s, int width) {
171 List<int> outCodes = new List<int>.filled(width, spaceChar);
172 outCodes.setRange(0, Math.min(width, s.length), s.codeUnits);
173 return new String.fromCharCodes(outCodes);
174 }
175
176 String _rightAlign(String s, int width) {
177 List<int> outCodes = new List<int>.filled(width, spaceChar);
178 int fromIndex = Math.max(0, width - s.length);
179 int length = Math.min(width, s.length);
180 outCodes.setRange(fromIndex, fromIndex + length, s.codeUnits);
181 return new String.fromCharCodes(outCodes);
182 }
183
184 static String _stringifyDoubleAsInt(double val) {
185 if (val.isInfinite || val.isNaN) {
186 return "NaN";
187 } else {
188 return val.toInt().toString();
189 }
190 }
191
192 String id;
193 String desc;
194 List<BlockSample> warmup;
195 List<BlockSample> results;
196 }
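
A sketch of how a report is assembled and printed; the id, description, and sample values below are hypothetical, and in practice the runners further down construct the TestReport themselves:

  var warmup = [new BlockSample(5, 400000)];
  var results = [new BlockSample(100, 2500000), new BlockSample(100, 2400000)];
  var report = new TestReport("utf8.decode", "decode a short string", warmup, results);
  report.printReport();                    // total time (ms), iterations, mean ns/iteration
  report.printReportWithThroughput(1024);  // same, plus MB/sec assuming 1024 bytes per iteration
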
197
198 class Runner {
199 static List<String> arguments; // Set by main.
200
201 static bool runTest(String testId) {
202 return arguments.length == 0 ||
203 arguments.any((String id) => id == testId);
204 }
205 }
206
207 /**
208 * Runs traditional blocking-style tests. Tests may be run a fixed number
209 * of times, or for a repetition count estimated from measured performance
210 * so that they take approximately a target duration.
211 */
212 class BenchmarkRunner extends Runner {
213 static void runCount(String id, String desc, CountTestConfig config,
214 Function test) {
215 if (runTest(id)) {
216 List<BlockSample> warmupSamples = _runTests(test, config._warmup, 1);
217 List<BlockSample> resultSamples = _runTests(test, config._reps, 1);
218 config.reportHandler(
219 new TestReport(id, desc, warmupSamples, resultSamples));
220 }
221 }
222
223 static void runTimed(String id, String desc, TimedTestConfig config,
224 Function test) {
225 if (runTest(id)) {
226 List<BlockSample> warmupSamples = _runTests(test, config._warmup, 1);
227 PerformanceModel model = _calibrate(config._minSampleTimeMs, 16, test);
228 int reps = model.repsFor((1E6 * config._targetTimeMs).toInt(), config._blocksize);
229 int blocksize = config._blocksize < 0 ? reps : config._blocksize;
230 List<BlockSample> resultSamples = _runTests(test, reps, blocksize);
231 config.reportHandler(
232 new TestReport(id, desc, warmupSamples, resultSamples));
233 }
234 }
235
236 static PerformanceModel _calibrate(int minSampleTimeMs, int maxAttempts,
237 Function test) {
238 PerformanceModel model;
239 int i = 0;
240 do {
241 model = _buildPerformanceModel(minSampleTimeMs, test);
242 i++;
243 } while (i < maxAttempts && !model.isValid());
244 return model;
245 }
246
247 static PerformanceModel _buildPerformanceModel(
248 int minSampleTimeMs, Function test) {
249 int iterations = 1;
250 List<BlockSample> calibrationResults = [];
251 BlockSample calibration = _execBlock(test, iterations);
252 calibrationResults.add(calibration);
253 while (calibration.durationNanos < (1E6 * minSampleTimeMs)) {
254 iterations *= 2;
255 calibration = _execBlock(test, iterations);
256 calibrationResults.add(calibration);
257 }
258 return new PerformanceModel.calculate(calibrationResults);
259 }
260
261 static List<BlockSample> _runTests(Function test, int count, int blocksize) {
262 List<BlockSample> samples = [];
263 for (int rem = count; rem > 0; rem -= blocksize) {
264 BlockSample bs = _execBlock(test, Math.min(blocksize, rem));
265 samples.add(bs);
266 }
267 return samples;
268 }
269
270 static BlockSample _execBlock(Function test, int count) {
271 Stopwatch s = new Stopwatch();
272 s.start();
273 for (int i = 0; i < count; i++) {
274 test();
275 }
276 s.stop();
277 return new BlockSample(count, s.elapsedMicroseconds * 1000);
278 }
279 }
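
A sketch of driving the blocking runner, assuming a hypothetical test closure decodeTest and the config classes defined at the end of this file; the ids, descriptions, and counts are made up for illustration:

  void decodeTest() {
    // Hypothetical body: the work being measured.
  }

  void main(List<String> args) {
    Runner.arguments = args;  // Used by runTest() to filter tests by id.
    // Fixed-count run: 100 warmup iterations, then 10000 measured iterations.
    BenchmarkRunner.runCount("decode-count", "fixed-count run",
        new CountTestConfig(100, 10000), decodeTest);
    // Timed run: 100 warmup iterations, then a repetition count estimated by
    // the calibration model to take roughly 2000 ms.
    BenchmarkRunner.runTimed("decode-timed", "timed run",
        new TimedTestConfig(100, 2000), decodeTest);
  }
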
280
281 /**
282 * A CPS test receives a continuation to invoke when its work completes.
283 */
284 typedef void CPSTest(Function continuation);
285
286 typedef void ReportHandler(TestReport r);
287
288 /**
289 * Runs non-blocking-style tests using Continuation Passing Style (CPS)
290 * callbacks. Tests may be run a fixed number of times, or for a repetition
291 * count estimated from measured performance to hit a target duration.
292 */
293 class CPSBenchmarkRunner extends Runner {
294
295 CPSBenchmarkRunner(): _cpsTests = [];
296
297 void addTest(CPSTest test) {
298 _cpsTests.add(test);
299 }
300
301 void runTests([int index = 0, Function continuation = null]) {
302 if (index < _cpsTests.length) {
303 _cpsTests[index](_(){
304 _addToEventQueue(_() => runTests(index + 1, continuation));
305 });
306 } else {
307 if (null != continuation) {
308 _addToEventQueue(_() => continuation());
309 }
310 }
311 }
312
313 List<CPSTest> _cpsTests;
314
315 static void runCount(String id, String desc, CountTestConfig config,
316 CPSTest test, void continuation()) {
317 if (runTest(id)) {
318 _runTests(test, config._warmup, 1, (List<BlockSample> warmupSamples){
319 int blocksize =
320 config._blocksize <= 0 ? config._reps : config._blocksize;
321 _runTests(test, config._reps, blocksize,
322 _(List<BlockSample> resultSamples) {
323 config.reportHandler(
324 new TestReport(id, desc, warmupSamples, resultSamples));
325 continuation();
326 });
327 });
328 } else {
329 continuation();
330 }
331 }
332
333 static void runTimed(String id, String desc, TimedTestConfig config,
334 CPSTest test, void continuation()) {
335 if (runTest(id)) {
336 _runTests(test, config._warmup, 1, (List<BlockSample> warmupSamples){
337 _calibrate(config._minSampleTimeMs, 5, test, (PerformanceModel model){
338 int reps =
339 model.repsFor((1E6 * config._targetTimeMs).toInt(), config._blocksize);
340 int blocksize =
341 config._blocksize <= 0 ? reps : config._blocksize;
342 _runTests(test, reps, blocksize, (List<BlockSample> results) {
343 config.reportHandler(
344 new TestReport(id, desc, warmupSamples, results));
345 continuation();
346 });
347 });
348 });
349 } else {
350 continuation();
351 }
352 }
353
354 static void nextTest(Function testLoop, int iteration) {
355 _addToEventQueue(() => testLoop(iteration + 1));
356 }
357
358 static void _calibrate(int minSampleTimeMs, int maxAttempts,
359 CPSTest test, void continuation(PerformanceModel model)) {
360 _buildPerformanceModel(minSampleTimeMs, test, (PerformanceModel model){
361 if (maxAttempts > 1 && !model.isValid()) {
362 _calibrate(minSampleTimeMs, maxAttempts - 1, test, continuation);
363 } else {
364 continuation(model);
365 }
366 });
367 }
368
369 static void _buildPerformanceModel(
370 int minSampleTimeMs, CPSTest test, void continuation(PerformanceModel m),
371 [int iterations = 1, List<BlockSample> calibrationResults = null]) {
372 List<BlockSample> _calibrationResults =
373 null == calibrationResults ? [] : calibrationResults;
374 _runTests(test, iterations, 1000, (List<BlockSample> calibration) {
375 _calibrationResults.addAll(calibration);
376 if (BlockSample._totalTime(calibration) < (1E6 * minSampleTimeMs)) {
377 _buildPerformanceModel(minSampleTimeMs, test, continuation,
378 iterations * 2,
379 _calibrationResults);
380 } else {
381 PerformanceModel model =
382 new PerformanceModel.calculate(_calibrationResults);
383 continuation(model);
384 }
385 });
386 }
387
388 static void _runTests(CPSTest test, int reps, int blocksize,
389 void continuation(List<BlockSample> samples),
390 [List<BlockSample> samples = null]) {
391 List<BlockSample> localSamples = (null == samples) ? [] : samples;
392 if (reps > 0) {
393 int blockCount = Math.min(blocksize, reps);
394 _execBlock(test, blockCount, (BlockSample sample){
395 localSamples.add(sample);
396 _addToEventQueue(() =>
397 _runTests(test, reps - blockCount, blocksize,
398 continuation, localSamples));
399 });
400 } else {
401 continuation(localSamples);
402 }
403 }
404
405 static void _execBlock(CPSTest test, int count,
406 void continuation(BlockSample sample)) {
407 Stopwatch s = new Stopwatch();
408 s.start();
409 _innerLoop(test, count, () {
410 s.stop();
411 continuation(new BlockSample(count, s.elapsedMicroseconds * 1000));
412 });
413 }
414
415 static void _innerLoop(CPSTest test, int remainingCount,
416 Function continuation) {
417 if (remainingCount > 1) {
418 test(() => _innerLoop(test, remainingCount - 1, continuation));
419 } else {
420 continuation();
421 }
422 }
423
424 static void _addToEventQueue(Function action) {
425 Timer.run(action);
426 }
427 }
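
A sketch of the CPS variant, where the test itself receives a continuation and the runner chains blocks through the event queue (Timer.run is already used by this class). The test body, id, and counts here are hypothetical:

  void asyncDecodeTest(Function continuation) {
    // Hypothetical asynchronous work; invoke the continuation when finished.
    Timer.run(() => continuation());
  }

  void runCpsSuite() {
    CPSBenchmarkRunner.runCount("decode-cps", "fixed-count CPS run",
        new CountTestConfig(100, 10000), asyncDecodeTest,
        () => print("decode-cps finished"));
  }
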
428
429 class CountTestConfig {
430 CountTestConfig(int this._warmup, int this._reps,
431 [int blocksize = -1, ReportHandler reportHandler = null]) {
432 this._blocksize = blocksize;
433 this._reportHandler = (null == reportHandler) ?
434 _(TestReport r) => r.printReport() : reportHandler;
435 }
436
437 Function _reportHandler;
438 Function get reportHandler => _reportHandler;
439 int _warmup;
440 int _reps;
441 int _blocksize;
442 }
443
444 class TimedTestConfig {
445 TimedTestConfig(int this._warmup, int this._targetTimeMs,
446 [int minSampleTimeMs = 100, int blocksize = -1,
447 ReportHandler reportHandler = null]) :
448 this._minSampleTimeMs = minSampleTimeMs,
449 this._blocksize = blocksize {
450 this._reportHandler = (null == reportHandler) ?
451 _(TestReport r) => r.printReport() : reportHandler;
452 }
453
454 Function _reportHandler;
455 Function get reportHandler => _reportHandler;
456 int _warmup;
457 int _targetTimeMs;
458 int _minSampleTimeMs;
459 int _blocksize;
460 }