| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2016 The Chromium Authors. All rights reserved. | 2 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """Script to generate chromium.perf.json and chromium.perf.fyi.json in | 6 """Script to generate chromium.perf.json and chromium.perf.fyi.json in |
| 7 the src/testing/buildbot directory. Maintaining these files by hand is | 7 the src/testing/buildbot directory. Maintaining these files by hand is |
| 8 too unwieldy. | 8 too unwieldy. |
| 9 """ | 9 """ |
| 10 | 10 |
| (...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 71 }, | 71 }, |
| 72 { | 72 { |
| 73 'name': 'Android Nexus9 Perf', | 73 'name': 'Android Nexus9 Perf', |
| 74 'shards': [2] | 74 'shards': [2] |
| 75 }, | 75 }, |
| 76 ], | 76 ], |
| 77 } | 77 } |
| 78 }, | 78 }, |
| 79 { | 79 { |
| 80 'args': [ | 80 'args': [ |
| 81 'cc_perftests', | |
| 82 '--test-launcher-print-test-stdio=always' | |
| 83 ], | |
| 84 'name': 'cc_perftests', | |
| 85 'script': 'gtest_perf_test.py', | |
| 86 'testers': { | |
| 87 'chromium.perf': [ | |
| 88 { | |
| 89 'name': 'Linux Perf', | |
| 90 'shards': [3] | |
| 91 }, | |
| 92 ] | |
| 93 } | |
| 94 }, | |
| 95 { | |
| 96 'args': [ | |
| 97 'tracing_perftests', | |
| 98 '--test-launcher-print-test-stdio=always' | |
| 99 ], | |
| 100 'name': 'tracing_perftests', | |
| 101 'script': 'gtest_perf_test.py', | |
| 102 'testers': { | |
| 103 'chromium.perf': [ | |
| 104 { | |
| 105 'name': 'Linux Perf', | |
| 106 'shards': [3] | |
| 107 }, | |
| 108 ] | |
| 109 } | |
| 110 }, | |
| 111 { | |
| 112 'args': [ | |
| 113 'load_library_perf_tests', | 81 'load_library_perf_tests', |
| 114 '--test-launcher-print-test-stdio=always' | 82 '--test-launcher-print-test-stdio=always' |
| 115 ], | 83 ], |
| 116 'name': 'load_library_perf_tests', | 84 'name': 'load_library_perf_tests', |
| 117 'script': 'gtest_perf_test.py', | 85 'script': 'gtest_perf_test.py', |
| 118 'testers': { | 86 'testers': { |
| 119 'chromium.perf': [ | 87 'chromium.perf': [ |
| 120 { | 88 { |
| 121 'name': 'Linux Perf', | |
| 122 'shards': [3] | |
| 123 }, | |
| 124 { | |
| 125 'name': 'Win 7 ATI GPU Perf', | |
| 126 'shards': [2] | |
| 127 }, | |
| 128 { | |
| 129 'name': 'Win 7 Nvidia GPU Perf', | |
| 130 'shards': [2] | |
| 131 }, | |
| 132 { | |
| 133 'name': 'Win 7 Perf', | |
| 134 'shards': [3] | |
| 135 }, | |
| 136 { | |
| 137 'name': 'Win 7 x64 Perf', | |
| 138 'shards': [2] | |
| 139 }, | |
| 140 { | |
| 141 'name': 'Win 8 Perf', | 89 'name': 'Win 8 Perf', |
| 142 'shards': [2] | 90 'shards': [2] |
| 143 }, | 91 }, |
| 144 ] | 92 ] |
| 145 } | 93 } |
| 146 }, | 94 }, |
| 147 { | 95 { |
| 148 'args': [ | 96 'args': [ |
| 149 'performance_browser_tests', | 97 'performance_browser_tests', |
| 150 '--test-launcher-print-test-stdio=always', | 98 '--test-launcher-print-test-stdio=always', |
| 151 '--gtest_filter=TabCapturePerformanceTest.*:CastV2PerformanceTest.*', | 99 '--gtest_filter=TabCapturePerformanceTest.*:CastV2PerformanceTest.*', |
| 152 '--test-launcher-jobs=1', | 100 '--test-launcher-jobs=1', |
| 153 '--enable-gpu' | 101 '--enable-gpu' |
| 154 ], | 102 ], |
| 155 'name': 'performance_browser_tests', | 103 'name': 'performance_browser_tests', |
| 156 'script': 'gtest_perf_test.py', | 104 'script': 'gtest_perf_test.py', |
| 157 'testers': { | 105 'testers': { |
| 158 'chromium.perf': [ | 106 'chromium.perf': [ |
| 159 { | 107 { |
| 160 'name': 'Mac 10.8 Perf', | 108 'name': 'Mac 10.8 Perf', |
| 161 'shards': [3] | 109 'shards': [3] |
| 162 }, | 110 }, |
| 163 { | 111 { |
| 164 'name': 'Mac 10.9 Perf', | 112 'name': 'Mac 10.9 Perf', |
| 165 'shards': [3] | 113 'shards': [3] |
| 166 }, | 114 }, |
| 167 { | 115 { |
| 168 'name': 'Win 7 ATI GPU Perf', | |
| 169 'shards': [2] | |
| 170 }, | |
| 171 { | |
| 172 'name': 'Win 7 Nvidia GPU Perf', | |
| 173 'shards': [2] | |
| 174 }, | |
| 175 { | |
| 176 'name': 'Win 7 Perf', | |
| 177 'shards': [3] | |
| 178 }, | |
| 179 { | |
| 180 'name': 'Win 7 x64 Perf', | |
| 181 'shards': [2] | |
| 182 }, | |
| 183 { | |
| 184 'name': 'Win 8 Perf', | 116 'name': 'Win 8 Perf', |
| 185 'shards': [2] | 117 'shards': [2] |
| 186 }, | 118 }, |
| 187 ] | 119 ] |
| 188 } | 120 } |
| 189 }, | 121 }, |
| 190 { | |
| 191 'args': [ | |
| 192 'angle_perftests', | |
| 193 '--test-launcher-print-test-stdio=always', | |
| 194 '--test-launcher-jobs=1' | |
| 195 ], | |
| 196 'name': 'angle_perftests', | |
| 197 'script': 'gtest_perf_test.py', | |
| 198 'testers': { | |
| 199 'chromium.perf': [ | |
| 200 { | |
| 201 'name': 'Win 7 ATI GPU Perf', | |
| 202 'shards': [2] | |
| 203 }, | |
| 204 { | |
| 205 'name': 'Win 7 Nvidia GPU Perf', | |
| 206 'shards': [2] | |
| 207 }, | |
| 208 ] | |
| 209 } | |
| 210 }, | |
| 211 ] | 122 ] |
| 212 | 123 |
| 213 | 124 |
| 214 def add_tester(waterfall, name, perf_id, platform, target_bits=64, | 125 def add_tester(waterfall, name, perf_id, platform, target_bits=64, |
| 215 num_host_shards=1, num_device_shards=1, swarming=None, | 126 num_host_shards=1, num_device_shards=1, swarming=None, |
| 216 use_whitelist=False): | 127 use_whitelist=False): |
| 217 del perf_id # this will be needed | 128 del perf_id # this will be needed |
| 218 waterfall['testers'][name] = { | 129 waterfall['testers'][name] = { |
| 219 'platform': platform, | 130 'platform': platform, |
| 220 'num_device_shards': num_device_shards, | 131 'num_device_shards': num_device_shards, |
| (...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 295 'android', target_bits=32, num_device_shards=7, num_host_shards=3) | 206 'android', target_bits=32, num_device_shards=7, num_host_shards=3) |
| 296 | 207 |
| 297 waterfall = add_tester( | 208 waterfall = add_tester( |
| 298 waterfall, 'Win Zenbook Perf', 'win-zenbook', 'win', num_host_shards=5) | 209 waterfall, 'Win Zenbook Perf', 'win-zenbook', 'win', num_host_shards=5) |
| 299 waterfall = add_tester( | 210 waterfall = add_tester( |
| 300 waterfall, 'Win 10 Perf', 'chromium-rel-win10', 'win', num_host_shards=5) | 211 waterfall, 'Win 10 Perf', 'chromium-rel-win10', 'win', num_host_shards=5) |
| 301 waterfall = add_tester( | 212 waterfall = add_tester( |
| 302 waterfall, 'Win 8 Perf', 'chromium-rel-win8-dual', 'win', num_host_shards=5) | 213 waterfall, 'Win 8 Perf', 'chromium-rel-win8-dual', 'win', num_host_shards=5) |
| 303 waterfall = add_tester( | 214 waterfall = add_tester( |
| 304 waterfall, 'Win 7 Perf', 'chromium-rel-win7-dual', | 215 waterfall, 'Win 7 Perf', 'chromium-rel-win7-dual', |
| 305 'win', target_bits=32, num_host_shards=5) | 216 'win', target_bits=32, |
| 217 swarming=[ |
| 218 { |
| 219 'gpu': '102b:0532', |
| 220 'os': 'Windows-2008ServerR2-SP1', |
| 221 'device_ids': [ |
| 222 'build185-m1', 'build186-m1', |
| 223 'build187-m1', 'build188-m1', 'build189-m1' |
| 224 ], |
| 225 'perf_tests': [ |
| 226 ('load_library_perf_tests', 2), |
| 227 ('performance_browser_tests', 2)] |
| 228 } |
| 229 ]) |
| 306 waterfall = add_tester( | 230 waterfall = add_tester( |
| 307 waterfall, 'Win 7 x64 Perf', | 231 waterfall, 'Win 7 x64 Perf', |
| 308 'chromium-rel-win7-x64-dual', 'win', num_host_shards=5) | 232 'chromium-rel-win7-x64-dual', 'win', |
| 233 swarming=[ |
| 234 { |
| 235 'gpu': '102b:0532', |
| 236 'os': 'Windows-2008ServerR2-SP1', |
| 237 'device_ids': [ |
| 238 'build138-m1', 'build139-m1', |
| 239 'build140-m1', 'build141-m1', 'build142-m1' |
| 240 ], |
| 241 'perf_tests': [ |
| 242 ('load_library_perf_tests', 2), |
| 243 ('performance_browser_tests', 2)] |
| 244 } |
| 245 ]) |
| 309 waterfall = add_tester( | 246 waterfall = add_tester( |
| 310 waterfall, 'Win 7 ATI GPU Perf', | 247 waterfall, 'Win 7 ATI GPU Perf', |
| 311 'chromium-rel-win7-gpu-ati', 'win', num_host_shards=5) | 248 'chromium-rel-win7-gpu-ati', 'win', |
| 249 swarming=[ |
| 250 { |
| 251 'gpu': '1002:6779', |
| 252 'os': 'Windows-2008ServerR2-SP1', |
| 253 'device_ids': [ |
| 254 'build101-m1', 'build102-m1', |
| 255 'build103-m1', 'build104-m1', 'build105-m1' |
| 256 ], |
| 257 'perf_tests': [ |
| 258 ('angle_perftests', 2), |
| 259 ('load_library_perf_tests', 2), |
| 260 ('performance_browser_tests', 2)] |
| 261 } |
| 262 ]) |
| 312 waterfall = add_tester( | 263 waterfall = add_tester( |
| 313 waterfall, 'Win 7 Intel GPU Perf', | 264 waterfall, 'Win 7 Intel GPU Perf', |
| 314 'chromium-rel-win7-gpu-intel', 'win', num_host_shards=5) | 265 'chromium-rel-win7-gpu-intel', 'win', |
| 266 swarming=[ |
| 267 { |
| 268 'gpu': '8086:041a', |
| 269 'os': 'Windows-2008ServerR2-SP1', |
| 270 'device_ids': [ |
| 271 'build164-m1', 'build165-m1', |
| 272 'build166-m1', 'build167-m1', 'build168-m1' |
| 273 ] |
| 274 } |
| 275 ]) |
| 315 waterfall = add_tester( | 276 waterfall = add_tester( |
| 316 waterfall, 'Win 7 Nvidia GPU Perf', | 277 waterfall, 'Win 7 Nvidia GPU Perf', |
| 317 'chromium-rel-win7-gpu-nvidia', 'win', num_host_shards=5) | 278 'chromium-rel-win7-gpu-nvidia', 'win', |
| 279 swarming=[ |
| 280 { |
| 281 'gpu': '10de:104a', |
| 282 'os': 'Windows-2008ServerR2-SP1', |
| 283 'device_ids': [ |
| 284 'build92-m1', 'build93-m1', |
| 285 'build94-m1', 'build95-m1', 'build96-m1' |
| 286 ], |
| 287 'perf_tests': [ |
| 288 ('angle_perftests', 2), |
| 289 ('load_library_perf_tests', 2), |
| 290 ('performance_browser_tests', 2)] |
| 291 } |
| 292 ]) |
| 318 | 293 |
| 319 waterfall = add_tester( | 294 waterfall = add_tester( |
| 320 waterfall, 'Mac 10.11 Perf', 'chromium-rel-mac11', | 295 waterfall, 'Mac 10.11 Perf', 'chromium-rel-mac11', |
| 321 'mac', num_host_shards=5) | 296 'mac', num_host_shards=5) |
| 322 waterfall = add_tester( | 297 waterfall = add_tester( |
| 323 waterfall, 'Mac 10.10 Perf', 'chromium-rel-mac10', | 298 waterfall, 'Mac 10.10 Perf', 'chromium-rel-mac10', |
| 324 'mac', | 299 'mac', |
| 325 swarming=[ | 300 swarming=[ |
| 326 { | 301 { |
| 327 'os': 'Mac-10.10', | 302 'os': 'Mac-10.10', |
| (...skipping 241 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 569 benchmark_avgs = {} | 544 benchmark_avgs = {} |
| 570 new_benchmarks = [] | 545 new_benchmarks = [] |
| 571 # Load in the avg times as calculated on Nov 1st, 2016 | 546 # Load in the avg times as calculated on Nov 1st, 2016 |
| 572 with open('desktop_benchmark_avg_times.json') as f: | 547 with open('desktop_benchmark_avg_times.json') as f: |
| 573 benchmark_avgs = json.load(f) | 548 benchmark_avgs = json.load(f) |
| 574 | 549 |
| 575 for benchmark in all_benchmarks: | 550 for benchmark in all_benchmarks: |
| 576 benchmark_avg_time = benchmark_avgs.get(benchmark.Name(), None) | 551 benchmark_avg_time = benchmark_avgs.get(benchmark.Name(), None) |
| 577 if benchmark_avg_time is None: | 552 if benchmark_avg_time is None: |
| 578 # Assume that this is a new benchmark that was added after 11/1/16 when | 553 # Assume that this is a new benchmark that was added after 11/1/16 when |
| 579 # we generated the benchmarks. Use the old affinity algorith after | 554 # we generated the benchmarks. Use the old affinity algorithm after |
| 580 # we have given the rest the same distribution, add it to the | 555 # we have given the rest the same distribution, add it to the |
| 581 # new benchmarks list. | 556 # new benchmarks list. |
| 582 print ('Warning: Benchmark %s was not seen in times generated on Nov1 ' | |
| 583 '2016, defaulting to old device affinity algorithm' % benchmark.Name()) | |
| 584 new_benchmarks.append(benchmark) | 557 new_benchmarks.append(benchmark) |
| 585 else: | 558 else: |
| 586 # Need to multiply the seconds by 2 since we will be generating two tests | 559 # Need to multiply the seconds by 2 since we will be generating two tests |
| 587 # for each benchmark to be run on the same shard for the reference build | 560 # for each benchmark to be run on the same shard for the reference build |
| 588 runtime_list.append((benchmark, benchmark_avg_time * 2.0)) | 561 runtime_list.append((benchmark, benchmark_avg_time * 2.0)) |
| 589 | 562 |
| 590 # Return a reverse sorted list by runtime | 563 # Return a reverse sorted list by runtime |
| 591 runtime_list.sort(key=lambda tup: tup[1], reverse=True) | 564 runtime_list.sort(key=lambda tup: tup[1], reverse=True) |
| 592 return runtime_list, new_benchmarks | 565 return runtime_list, new_benchmarks |
| 593 | 566 |
| (...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 679 waterfall['name'] = 'chromium.perf' | 652 waterfall['name'] = 'chromium.perf' |
| 680 fyi_waterfall = get_fyi_waterfall_config() | 653 fyi_waterfall = get_fyi_waterfall_config() |
| 681 fyi_waterfall['name'] = 'chromium.perf.fyi' | 654 fyi_waterfall['name'] = 'chromium.perf.fyi' |
| 682 | 655 |
| 683 generate_all_tests(fyi_waterfall) | 656 generate_all_tests(fyi_waterfall) |
| 684 generate_all_tests(waterfall) | 657 generate_all_tests(waterfall) |
| 685 return 0 | 658 return 0 |
| 686 | 659 |
| 687 if __name__ == '__main__': | 660 if __name__ == '__main__': |
| 688 sys.exit(main()) | 661 sys.exit(main()) |
| OLD | NEW |