Chromium Code Reviews
/*
 * Copyright 2013 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "BenchTimer.h"
#include "PictureBenchmark.h"
#include "PictureRenderer.h"
#include "PictureRenderingFlags.h"
#include "SkCommandLineFlags.h"
#include "SkForceLinking.h"
#include "SkStream.h"
#include "SkString.h"
#include "SkGraphics.h"

caryclark 2013/07/10 15:28:23
alphabetize
#include "TimerData.h"
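For reference, one ordering that would satisfy the comment above (a sketch assuming plain lexicographic order across the whole block; the author may instead prefer to keep the Sk* headers grouped):

```cpp
#include "BenchTimer.h"
#include "PictureBenchmark.h"
#include "PictureRenderer.h"
#include "PictureRenderingFlags.h"
#include "SkCommandLineFlags.h"
#include "SkForceLinking.h"
#include "SkGraphics.h"
#include "SkStream.h"
#include "SkString.h"
#include "TimerData.h"
```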


__SK_FORCE_IMAGE_DECODER_LINKING;

static const int kNumRecordings = 10;
static const int kNumPlaybacks = 1;

static const int kNumTileDimensions = 2;

static const int gTileDimensions[kNumTileDimensions][2] = {

caryclark 2013/07/10 15:28:23
use [] instead of [2]

    {256, 256},
    {512, 512},
};
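One reading of the note above (an assumption on my part, since the comment is terse) is to let the compiler deduce the outer dimension and derive the tile count from the array itself, swapping the declaration order so the count can be computed with Skia's SK_ARRAY_COUNT macro:

```cpp
static const int gTileDimensions[][2] = {
    {256, 256},
    {512, 512},
};
// sizeof-based element count; avoids keeping kNumTileDimensions in sync by hand.
static const int kNumTileDimensions = SK_ARRAY_COUNT(gTileDimensions);
```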

enum BenchmarkType {
    kNormal_BenchmarkType = 0,
    kRTree_BenchmarkType,
};

struct Histogram {
    int pathIndex;
    SkScalar cpuTime;

caryclark 2013/07/10 15:28:23
fPathIndex
fCpuTime

};
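Applying the names suggested above (Skia's f-prefix convention for member fields) would look roughly like this; call sites such as `histogram[index - 1].pathIndex` would be renamed to match:

```cpp
struct Histogram {
    int fPathIndex;    // index of the .skp path in argv
    SkScalar fCpuTime; // measured CPU time for that picture
};
```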

// Defined in PictureRenderingFlags.cpp
extern bool lazy_decode_bitmap(const void* buffer, size_t size, SkBitmap* bitmap);

static SkPicture* pic_from_path(const char path[]) {
    SkASSERT(NULL != path);
    SkFILEStream stream(path);
    if (!stream.isValid()) {
        SkDebugf("-- Can't open '%s'\n", path);
        return NULL;
    }
    return SkPicture::CreateFromStream(&stream, &lazy_decode_bitmap);
}

/**
 * This function is the sink to which all work ends up going.
 * Renders the picture into the renderer. It may or may not use an RTree.
 * The renderer is chosen upstream. If we want to measure recording, we will
 * use a RecordPictureRenderer. If we want to measure rendering, we will use a
 * TiledPictureRenderer.
 */
static void do_benchmark_work(sk_tools::PictureRenderer* renderer,
        int benchmarkType, const SkString* path, SkPicture* pic,
        const int numRepeats, const char* msg) {
    SkASSERT(NULL != pic);

    SkString msgPrefix;

    switch (benchmarkType) {
        case kNormal_BenchmarkType:
            msgPrefix.printf("Normal");
            renderer->setBBoxHierarchyType(sk_tools::PictureRenderer::kNone_BBoxHierarchyType);
            break;
        case kRTree_BenchmarkType:
            msgPrefix.printf("RTree");
            renderer->setBBoxHierarchyType(sk_tools::PictureRenderer::kRTree_BBoxHierarchyType);
            break;
    }

    renderer->init(pic);

    SkDebugf("%s %s %s %d times...\n", msgPrefix.c_str(), msg, path->c_str(), numRepeats);
    for (int i = 0; i < numRepeats; ++i) {
        renderer->setup();
        bool result = renderer->render(path);
        if (!result) {
            SkDebugf("Error recording.\n");
        }
    }
    renderer->end();
}

/**
 * Call do_benchmark_work with a tiled renderer using the default tile dimensions.
 */
static void benchmark_playback(BenchmarkType benchmarkType, const SkString* path, SkPicture* pic) {
    sk_tools::TiledPictureRenderer renderer;

    for (int i = 0; i < kNumTileDimensions; ++i) {
        SkDebugf("Setting tile dimensions %dx%d\n", gTileDimensions[i][0], gTileDimensions[i][1]);
        renderer.setTileWidth(gTileDimensions[i][0]);
        renderer.setTileHeight(gTileDimensions[i][1]);

        do_benchmark_work(&renderer, benchmarkType, path, pic, kNumPlaybacks, "tiled playback");
    }
}

/**
 * Call do_benchmark_work with a RecordPictureRenderer.
 */
static void benchmark_recording(BenchmarkType benchmarkType, const SkString* path, SkPicture* pic) {
    sk_tools::RecordPictureRenderer renderer;

    do_benchmark_work(&renderer, benchmarkType, path, pic, kNumRecordings, "recording");
}

static void (*benchmark_functions[2])(BenchmarkType, const SkString*, SkPicture*);

static const SkString perIterTimeFormat("%f");
static const SkString normalTimeFormat("%f");

/**
 * Takes argc, argv along with one of the benchmark functions defined above.
 * Will loop over all skp files and perform measurements.
 *
 * Returns a SkScalar representing CPU time taken during the benchmark.
 * As a side effect, it spits the timer result to stdout.
 * Will return -1.0 on error.
 */
static SkScalar benchmark_loop(
        int argc,
        char** argv,
        void (*func)(BenchmarkType, const SkString*, SkPicture*),
        Histogram histogram[],
        BenchmarkType benchmarkType,
        const char* configName) {
    TimerData timerData(perIterTimeFormat, normalTimeFormat);
    for (int index = 1; index < argc; ++index) {
        BenchTimer timer;
        SkString path(argv[index]);
        SkAutoTUnref<SkPicture> pic(pic_from_path(argv[index]));
        timer.start();
        if (NULL != pic) {
            func(benchmarkType, &path, pic);
        }
        timer.end();
        timerData.appendTimes(&timer, index == argc - 1);

        histogram[index - 1].pathIndex = index;
        histogram[index - 1].cpuTime = timer.fCpu;
    }

    const SkString timerResult = timerData.getResult(
            /*logPerIter = */ false,
            /*printMin = */ false,
            /*repeatDraw = */ 1,
            /*configName = */ configName,
            /*showWallTime = */ false,
            /*showTruncatedWallTime = */ false,
            /*showCpuTime = */ true,
            /*showTruncatedCpuTime = */ false,
            /*showGpuTime = */ false);

    const char findStr[] = "= ";
    int pos = timerResult.find(findStr);
    if (-1 == pos) {
        SkDebugf("Unexpected output from TimerData::getResult(...). Unable to parse.");
        return -1.0;

caryclark 2013/07/10 15:28:23
this may generate a warning since it's a double ca
    }
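The note above seems to concern returning the double literal -1.0 from a function whose return type is SkScalar (typically a float), both here and in the later `return -1.0;` below. Under that reading, a conversion-safe sketch of the early return could be:

```cpp
    if (-1 == pos) {
        SkDebugf("Unexpected output from TimerData::getResult(...). Unable to parse.");
        return SkIntToScalar(-1);  // avoids an implicit double -> SkScalar narrowing
    }
```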
    SkDebugf("%s\n", timerResult.c_str());

    SkScalar cpuTime = atof(timerResult.c_str() + pos + sizeof(findStr) - 1);
    if (cpuTime == SkIntToScalar(0)) {  // atof returns 0.0 on error.
        SkDebugf("Unable to read value from timer result.\n");
        return -1.0;
    }
    return cpuTime;
}

static int tool_main(int argc, char** argv) {
    SkAutoGraphics ag;
    SkString usage;
    usage.printf("Usage: filename [filename]*\n");

    if (argc < 2) {
        SkDebugf("%s\n", usage.c_str());
        return 0;
    }

    benchmark_functions[0] = benchmark_recording;
    benchmark_functions[1] = benchmark_playback;

    static const int kNumBenchmarks = 4;
    static const char* benchNames[] = {
        "normal_recording",
        "normal_playback",
        "rtree_recording",
        "rtree_playback",
    };
    // We want names for these variables to make the arithmetic clearer.
    SkScalar normalRecordResult;
    SkScalar rtreeRecordResult;
    SkScalar normalPlaybackResult;
    SkScalar rtreePlaybackResult;
    static SkScalar* resultPointers[] = {
        &normalRecordResult,
        &normalPlaybackResult,
        &rtreeRecordResult,
        &rtreePlaybackResult,
    };
    // One histogram for every benchmark.
    static Histogram* histograms[] = {
        SkNEW_ARRAY(Histogram, argc - 1),
        SkNEW_ARRAY(Histogram, argc - 1),
        SkNEW_ARRAY(Histogram, argc - 1),
        SkNEW_ARRAY(Histogram, argc - 1),
    };

    for (int i = 0; i < kNumBenchmarks; ++i) {
        BenchmarkType type;
        if (i < 2) {
            type = kNormal_BenchmarkType;
        } else {
            type = kRTree_BenchmarkType;
        }

        SkString benchmarkName(benchNames[i]);
        SkScalar* resultPtr = resultPointers[i];
        *resultPtr = benchmark_loop(
                argc, argv, benchmark_functions[i % 2], histograms[i],
                type, benchmarkName.c_str());
    }
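As an explanatory aside (not part of the patch), the `i < 2` / `i % 2` indexing above maps onto benchNames and resultPointers like so:

```cpp
// i == 0: kNormal_BenchmarkType + benchmark_recording -> "normal_recording" -> normalRecordResult
// i == 1: kNormal_BenchmarkType + benchmark_playback  -> "normal_playback"  -> normalPlaybackResult
// i == 2: kRTree_BenchmarkType  + benchmark_recording -> "rtree_recording"  -> rtreeRecordResult
// i == 3: kRTree_BenchmarkType  + benchmark_playback  -> "rtree_playback"   -> rtreePlaybackResult
```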

    // Print results
    SkDebugf("\n");
    for (int i = 0; i < kNumBenchmarks; ++i) {
        SkDebugf("%s total: \t%f\n", benchNames[i], *resultPointers[i]);
    }

    SkASSERT(normalRecordResult != 0 && normalPlaybackResult != 0);
    SkDebugf("\n");
    SkDebugf("Recording: Relative difference: %.4f\n", rtreeRecordResult / normalRecordResult);
    SkDebugf("Playback: Relative difference: %.4f\n", rtreePlaybackResult / normalPlaybackResult);

    SkScalar times =
            (kNumPlaybacks * (normalRecordResult - rtreeRecordResult)) /
            (kNumRecordings * (rtreePlaybackResult - normalPlaybackResult));

    SkDebugf("Number of playback repetitions for RTree to be worth it: %d (ratio: %.4f)\n",
             SkScalarCeilToInt(times), times);
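For readers of the output, here is the arithmetic behind `times` as I read it: each *Result total covers kNumRecordings recordings or kNumPlaybacks playbacks, so `times` is the per-picture playback count at which the extra RTree recording cost is paid back by faster playback (assuming the usual case where RTree records slower and plays back faster, so both terms are positive). An equivalent per-iteration sketch:

```cpp
SkScalar extraRecordCostPerIter =
        (rtreeRecordResult - normalRecordResult) / kNumRecordings;
SkScalar playbackSavingPerIter =
        (normalPlaybackResult - rtreePlaybackResult) / kNumPlaybacks;
// Algebraically equal to 'times' above (numerator and denominator differ only by sign).
SkScalar breakEven = extraRecordCostPerIter / playbackSavingPerIter;
```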

    // Print min/max times.
    SkDebugf("\n");
    SkScalar minMax[][2] = {
        // MIN          MAX
        {SK_ScalarMax, 0},
        {SK_ScalarMax, 0},
        {SK_ScalarMax, 0},
        {SK_ScalarMax, 0},
    };
    for (int i = 0; i < argc - 1; ++i) {
        for (int j = 0; j < kNumBenchmarks; ++j) {
            SkScalar value = histograms[j][i].cpuTime;
            if (value < minMax[j][0]) {
                minMax[j][0] = value;
            }
            if (value > minMax[j][1]) {
                minMax[j][1] = value;
            }
        }
    }
    for (int i = 0; i < kNumBenchmarks; ++i) {
        SkString out;
        out.printf("%s min is ", benchNames[i]);
        out.appendf("%f\n", minMax[i][0]);
        out.appendf("%s max is ", benchNames[i]);
        out.appendf("%f\n", minMax[i][1]);
        SkDebugf("%s", out.c_str());
    }

    // Output gnuplot-readable histogram data.
    const char* pbTitle = "bbh_shootout_playback.dat";
    const char* recTitle = "bbh_shootout_record.dat";
    SkFILEWStream playbackOut(pbTitle);
    SkFILEWStream recordOut(recTitle);
    playbackOut.writeText("# Index Normal RTree\n");
    recordOut.writeText("# Index Normal RTree\n");
    for (int i = 0; i < argc - 1; ++i) {
        SkString pbLine;
        SkString recLine;
        pbLine.printf("%d ", i);
        recLine.printf("%d ", i);
        for (int j = 0; j < kNumBenchmarks; j += 2) {
            recLine.appendf("%f ", histograms[j][i].cpuTime);
            pbLine.appendf("%f ", histograms[j + 1][i].cpuTime);
        }
        pbLine.appendf("\n");
        recLine.appendf("\n");
        playbackOut.writeText(pbLine.c_str());
        recordOut.writeText(recLine.c_str());
    }
    SkDebugf("Output files: %s %s\n", pbTitle, recTitle);

    for (int i = 0; i < kNumBenchmarks; ++i) {
        SkDELETE_ARRAY(histograms[i]);
    }

    return 0;
}

int main(int argc, char** argv) {
    return tool_main(argc, argv);
}