| OLD | NEW |
| 1 # Copyright 2016 The Chromium Authors. All rights reserved. | 1 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 | 5 |
| 6 # Recipe module for Skia Swarming trigger. | 6 # Recipe module for Skia Swarming trigger. |
| 7 | 7 |
| 8 | 8 |
| 9 import json | 9 import json |
| 10 | 10 |
| 11 | 11 |
| 12 DEPS = [ | 12 DEPS = [ |
| 13 'depot_tools/gclient', | 13 'depot_tools/gclient', |
| 14 'depot_tools/git', | 14 'depot_tools/git', |
| 15 'depot_tools/tryserver', | 15 'depot_tools/tryserver', |
| 16 'file', | 16 'file', |
| 17 'gsutil', |
| 17 'recipe_engine/path', | 18 'recipe_engine/path', |
| 18 'recipe_engine/properties', | 19 'recipe_engine/properties', |
| 19 'recipe_engine/python', | 20 'recipe_engine/python', |
| 20 'recipe_engine/raw_io', | 21 'recipe_engine/raw_io', |
| 22 'recipe_engine/time', |
| 21 'skia', | 23 'skia', |
| 22 'skia_swarming', | 24 'skia_swarming', |
| 23 ] | 25 ] |
| 24 | 26 |
| 25 | 27 |
| 26 TEST_BUILDERS = { | 28 TEST_BUILDERS = { |
| 27 'client.skia': { | 29 'client.skia': { |
| 28 'skiabot-linux-swarm-012': [ | 30 'skiabot-linux-swarm-012': [ |
| 29 'Test-Ubuntu-GCC-ShuttleA-GPU-GTX550Ti-x86_64-Release-Valgrind', | 31 'Test-Ubuntu-GCC-ShuttleA-GPU-GTX550Ti-x86_64-Release-Valgrind', |
| 30 ], | 32 ], |
| 33 'skiabot-linux-swarm-013': [ |
| 34 'Test-Ubuntu-Clang-GCE-CPU-AVX2-x86_64-Coverage-Trybot', |
| 35 ], |
| 31 }, | 36 }, |
| 32 'client.skia.fyi': { | 37 'client.skia.fyi': { |
| 33 'skiabot-linux-housekeeper-003': [ | 38 'skiabot-linux-housekeeper-003': [ |
| 34 'Build-Mac-Clang-x86_64-Release-Swarming', | 39 'Build-Mac-Clang-x86_64-Release-Swarming', |
| 35 'Build-Ubuntu-GCC-x86_64-Debug-Swarming', | 40 'Build-Ubuntu-GCC-x86_64-Debug-Swarming', |
| 36 'Build-Ubuntu-GCC-x86_64-Release-Swarming-Trybot', | 41 'Build-Ubuntu-GCC-x86_64-Release-Swarming-Trybot', |
| 37 'Build-Win8-MSVC-x86_64-Release-Swarming', | 42 'Build-Win8-MSVC-x86_64-Release-Swarming', |
| 38 'Perf-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-Swarming-Trybot', | 43 'Perf-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-Swarming-Trybot', |
| 39 'Test-Android-GCC-Nexus7v2-GPU-Tegra3-Arm7-Release-Swarming', | 44 'Test-Android-GCC-Nexus7v2-GPU-Tegra3-Arm7-Release-Swarming', |
| 40 'Test-ChromeOS-GCC-Link-CPU-AVX-x86_64-Debug', | 45 'Test-ChromeOS-GCC-Link-CPU-AVX-x86_64-Debug', |
| (...skipping 116 matching lines...) |
| 157 extra_args.append('%s=%s' % (k, v)) | 162 extra_args.append('%s=%s' % (k, v)) |
| 158 | 163 |
| 159 isolate_base_dir = api.path['slave_build'] | 164 isolate_base_dir = api.path['slave_build'] |
| 160 dimensions = swarm_dimensions(builder_spec) | 165 dimensions = swarm_dimensions(builder_spec) |
| 161 isolate_blacklist = ['.git', 'out', '*.pyc'] | 166 isolate_blacklist = ['.git', 'out', '*.pyc'] |
| 162 isolate_vars = { | 167 isolate_vars = { |
| 163 'BUILD': api.path['build'], | 168 'BUILD': api.path['build'], |
| 164 'WORKDIR': api.path['slave_build'], | 169 'WORKDIR': api.path['slave_build'], |
| 165 } | 170 } |
| 166 | 171 |
| 172 isolate_file = '%s_skia.isolate' % task_name |
| 173 if 'Coverage' == builder_cfg['configuration']: |
| 174 isolate_file = 'coverage_skia.isolate' |
| 167 return api.skia_swarming.isolate_and_trigger_task( | 175 return api.skia_swarming.isolate_and_trigger_task( |
| 168 infrabots_dir.join('%s_skia.isolate' % task_name), | 176 infrabots_dir.join(isolate_file), |
| 169 isolate_base_dir, | 177 isolate_base_dir, |
| 170 '%s_skia' % task_name, | 178 '%s_skia' % task_name, |
| 171 isolate_vars, | 179 isolate_vars, |
| 172 dimensions, | 180 dimensions, |
| 173 isolate_blacklist=isolate_blacklist, | 181 isolate_blacklist=isolate_blacklist, |
| 174 extra_isolate_hashes=extra_isolate_hashes, | 182 extra_isolate_hashes=extra_isolate_hashes, |
| 175 idempotent=idempotent, | 183 idempotent=idempotent, |
| 176 store_output=store_output, | 184 store_output=store_output, |
| 177 extra_args=extra_args, | 185 extra_args=extra_args, |
| 178 expiration=expiration, | 186 expiration=expiration, |
| (...skipping 61 matching lines...) |
| 240 """Some builders require longer than the default timeouts. | 248 """Some builders require longer than the default timeouts. |
| 241 | 249 |
| 242 Returns tuple of (expiration, hard_timeout). If those values are None then | 250 Returns tuple of (expiration, hard_timeout). If those values are None then |
| 243 default timeouts should be used. | 251 default timeouts should be used. |
| 244 """ | 252 """ |
| 245 expiration = None | 253 expiration = None |
| 246 hard_timeout = None | 254 hard_timeout = None |
| 247 if 'Valgrind' in builder_cfg.get('extra_config', ''): | 255 if 'Valgrind' in builder_cfg.get('extra_config', ''): |
| 248 expiration = 2*24*60*60 | 256 expiration = 2*24*60*60 |
| 249 hard_timeout = 9*60*60 | 257 hard_timeout = 9*60*60 |
| 258 elif 'Coverage' == builder_cfg['configuration']: |
| 259 hard_timeout = 3*60*60 |
| 250 return expiration, hard_timeout | 260 return expiration, hard_timeout |
| 251 | 261 |
| 252 | 262 |
| 253 def perf_steps_trigger(api, builder_spec, got_revision, infrabots_dir, | 263 def perf_steps_trigger(api, builder_spec, got_revision, infrabots_dir, |
| 254 extra_hashes): | 264 extra_hashes): |
| 255 """Trigger perf tests via Swarming.""" | 265 """Trigger perf tests via Swarming.""" |
| 256 | 266 |
| 257 expiration, hard_timeout = get_timeouts(builder_spec['builder_cfg']) | 267 expiration, hard_timeout = get_timeouts(builder_spec['builder_cfg']) |
| 258 return trigger_task( | 268 return trigger_task( |
| 259 api, | 269 api, |
| (...skipping 54 matching lines...) |
| 314 'test', | 324 'test', |
| 315 api.properties['buildername'], | 325 api.properties['buildername'], |
| 316 builder_spec, | 326 builder_spec, |
| 317 got_revision, | 327 got_revision, |
| 318 infrabots_dir, | 328 infrabots_dir, |
| 319 extra_isolate_hashes=extra_hashes, | 329 extra_isolate_hashes=extra_hashes, |
| 320 expiration=expiration, | 330 expiration=expiration, |
| 321 hard_timeout=hard_timeout) | 331 hard_timeout=hard_timeout) |
| 322 | 332 |
| 323 | 333 |
| 324 def test_steps_collect(api, task, upload_dm_results, got_revision, is_trybot): | 334 def test_steps_collect(api, task, upload_dm_results, got_revision, is_trybot, |
| 325 """Collect the DM results from Swarming.""" | 335 builder_cfg): |
| 336 """Collect the test results from Swarming.""" |
| 326 # Wait for tests to finish, download the results. | 337 # Wait for tests to finish, download the results. |
| 327 api.file.rmtree('results_dir', task.task_output_dir, infra_step=True) | 338 api.file.rmtree('results_dir', task.task_output_dir, infra_step=True) |
| 328 api.skia_swarming.collect_swarming_task(task) | 339 api.skia_swarming.collect_swarming_task(task) |
| 329 | 340 |
| 330 # Upload the results. | 341 # Upload the results. |
| 331 if upload_dm_results: | 342 if upload_dm_results: |
| 332 dm_dir = api.path['slave_build'].join('dm') | 343 dm_dir = api.path['slave_build'].join('dm') |
| 333 dm_src = task.task_output_dir.join('0', 'dm') | 344 dm_src = task.task_output_dir.join('0', 'dm') |
| 334 api.file.rmtree('dm_dir', dm_dir, infra_step=True) | 345 api.file.rmtree('dm_dir', dm_dir, infra_step=True) |
| 335 api.file.copytree('dm_dir', dm_src, dm_dir, infra_step=True) | 346 api.file.copytree('dm_dir', dm_src, dm_dir, infra_step=True) |
| 336 | 347 |
| 337 # Upload them to Google Storage. | 348 # Upload them to Google Storage. |
| 338 api.python( | 349 api.python( |
| 339 'Upload DM Results', | 350 'Upload DM Results', |
| 340 script=api.skia.resource('upload_dm_results.py'), | 351 script=api.skia.resource('upload_dm_results.py'), |
| 341 args=[ | 352 args=[ |
| 342 dm_dir, | 353 dm_dir, |
| 343 got_revision, | 354 got_revision, |
| 344 api.properties['buildername'], | 355 api.properties['buildername'], |
| 345 api.properties['buildnumber'], | 356 api.properties['buildnumber'], |
| 346 api.properties['issue'] if is_trybot else '', | 357 api.properties['issue'] if is_trybot else '', |
| 347 api.path['slave_build'].join('skia', 'common', 'py', 'utils'), | 358 api.path['slave_build'].join('skia', 'common', 'py', 'utils'), |
| 348 ], | 359 ], |
| 349 cwd=api.path['checkout'], | 360 cwd=api.path['checkout'], |
| 350 env=api.skia.gsutil_env('chromium-skia-gm.boto'), | 361 env=api.skia.gsutil_env('chromium-skia-gm.boto'), |
| 351 infra_step=True) | 362 infra_step=True) |
| 352 | 363 |
| 364 if builder_cfg['configuration'] == 'Coverage': |
| 365 upload_coverage_results(api, task, got_revision, is_trybot) |
| 366 |
| 367 |
| 368 def upload_coverage_results(api, task, got_revision, is_trybot): |
| 369 results_dir = task.task_output_dir.join('0') |
| 370 git_timestamp = api.git.get_timestamp(test_data='1408633190', |
| 371 infra_step=True) |
| 372 |
| 373 # Upload raw coverage data. |
| 374 cov_file_basename = '%s.cov' % got_revision |
| 375 cov_file = results_dir.join(cov_file_basename) |
| 376 now = api.time.utcnow() |
| 377 gs_json_path = '/'.join(( |
| 378 str(now.year).zfill(4), str(now.month).zfill(2), |
| 379 str(now.day).zfill(2), str(now.hour).zfill(2), |
| 380 api.properties['buildername'], |
| 381 str(api.properties['buildnumber']))) |
| 382 if is_trybot: |
| 383 gs_json_path = '/'.join(('trybot', gs_json_path, |
| 384 str(api.properties['issue']))) |
| 385 api.gsutil.upload( |
| 386 name='upload raw coverage data', |
| 387 source=cov_file, |
| 388 bucket='skia-infra', |
| 389 dest='/'.join(('coverage-raw-v1', gs_json_path, |
| 390 cov_file_basename)), |
| 391 env={'AWS_CREDENTIAL_FILE': None, 'BOTO_CONFIG': None}, |
| 392 ) |
| 393 |
| 394 # Transform the nanobench_${git_hash}.json file received from swarming bot |
| 395 # into the nanobench_${git_hash}_${timestamp}.json file |
| 396 # upload_bench_results.py expects. |
| 397 src_nano_file = results_dir.join('nanobench_%s.json' % got_revision) |
| 398 dst_nano_file = results_dir.join( |
| 399 'nanobench_%s_%s.json' % (got_revision, git_timestamp)) |
| 400 api.file.copy('nanobench JSON', src_nano_file, dst_nano_file, |
| 401 infra_step=True) |
| 402 api.file.remove('old nanobench JSON', src_nano_file) |
| 403 |
| 404 # Upload nanobench JSON data. |
| 405 gsutil_path = api.path['depot_tools'].join( |
| 406 'third_party', 'gsutil', 'gsutil') |
| 407 upload_args = [api.properties['buildername'], api.properties['buildnumber'], |
| 408 results_dir, got_revision, gsutil_path] |
| 409 if is_trybot: |
| 410 upload_args.append(api.properties['issue']) |
| 411 api.python( |
| 412 'upload nanobench coverage results', |
| 413 script=api.skia.resource('upload_bench_results.py'), |
| 414 args=upload_args, |
| 415 cwd=api.path['checkout'], |
| 416 env=api.skia.gsutil_env('chromium-skia-gm.boto'), |
| 417 infra_step=True) |
| 418 |
| 419 # Transform the coverage_by_line_${git_hash}.json file received from |
| 420 # swarming bot into a coverage_by_line_${git_hash}_${timestamp}.json file. |
| 421 src_lbl_file = results_dir.join('coverage_by_line_%s.json' % got_revision) |
| 422 dst_lbl_file_basename = 'coverage_by_line_%s_%s.json' % ( |
| 423 got_revision, git_timestamp) |
| 424 dst_lbl_file = results_dir.join(dst_lbl_file_basename) |
| 425 api.file.copy('Line-by-line coverage JSON', src_lbl_file, dst_lbl_file, |
| 426 infra_step=True) |
| 427 api.file.remove('old line-by-line coverage JSON', src_lbl_file) |
| 428 |
| 429 # Upload line-by-line coverage data. |
| 430 api.gsutil.upload( |
| 431 name='upload line-by-line coverage data', |
| 432 source=dst_lbl_file, |
| 433 bucket='skia-infra', |
| 434 dest='/'.join(('coverage-json-v1', gs_json_path, |
| 435 dst_lbl_file_basename)), |
| 436 env={'AWS_CREDENTIAL_FILE': None, 'BOTO_CONFIG': None}, |
| 437 ) |
| 438 |
| 353 | 439 |
| 354 def RunSteps(api): | 440 def RunSteps(api): |
| 355 got_revision = checkout_steps(api) | 441 got_revision = checkout_steps(api) |
| 356 builder_spec = api.skia.get_builder_spec(api.path['checkout'], | 442 builder_spec = api.skia.get_builder_spec(api.path['checkout'], |
| 357 api.properties['buildername']) | 443 api.properties['buildername']) |
| 358 builder_cfg = builder_spec['builder_cfg'] | 444 builder_cfg = builder_spec['builder_cfg'] |
| 359 infrabots_dir = api.path['checkout'].join('infra', 'bots') | 445 infrabots_dir = api.path['checkout'].join('infra', 'bots') |
| 360 | 446 |
| 361 api.skia_swarming.setup( | 447 api.skia_swarming.setup( |
| 362 api.path['checkout'].join('infra', 'bots', 'tools', 'luci-go'), | 448 api.path['checkout'].join('infra', 'bots', 'tools', 'luci-go'), |
| 363 swarming_rev='') | 449 swarming_rev='') |
| 364 | 450 |
| 365 recipes_hash = isolate_recipes(api) | 451 recipes_hash = isolate_recipes(api) |
| 366 | 452 |
| 367 compile_hash = compile_steps_swarm(api, builder_spec, got_revision, | 453 do_compile_steps = builder_spec.get('do_compile_steps', True) |
| 368 infrabots_dir, [recipes_hash]) | 454 compile_hash = None |
| 455 if do_compile_steps: |
| 456 compile_hash = compile_steps_swarm(api, builder_spec, got_revision, |
| 457 infrabots_dir, [recipes_hash]) |
| 369 | 458 |
| 370 do_test_steps = builder_spec['do_test_steps'] | 459 do_test_steps = builder_spec['do_test_steps'] |
| 371 do_perf_steps = builder_spec['do_perf_steps'] | 460 do_perf_steps = builder_spec['do_perf_steps'] |
| 372 | 461 |
| 373 if not (do_test_steps or do_perf_steps): | 462 if not (do_test_steps or do_perf_steps): |
| 374 return | 463 return |
| 375 | 464 |
| 376 extra_hashes = [recipes_hash, compile_hash] | 465 extra_hashes = [recipes_hash] |
| 466 if compile_hash: |
| 467 extra_hashes.append(compile_hash) |
| 377 | 468 |
| 378 api.skia.download_skps(api.path['slave_build'].join('tmp'), | 469 api.skia.download_skps(api.path['slave_build'].join('tmp'), |
| 379 api.path['slave_build'].join('skps'), | 470 api.path['slave_build'].join('skps'), |
| 380 False) | 471 False) |
| 381 api.skia.download_images(api.path['slave_build'].join('tmp'), | 472 api.skia.download_images(api.path['slave_build'].join('tmp'), |
| 382 api.path['slave_build'].join('images'), | 473 api.path['slave_build'].join('images'), |
| 383 False) | 474 False) |
| 384 | 475 |
| 385 test_task = None | 476 test_task = None |
| 386 perf_task = None | 477 perf_task = None |
| 387 if do_test_steps: | 478 if do_test_steps: |
| 388 test_task = test_steps_trigger(api, builder_spec, got_revision, | 479 test_task = test_steps_trigger(api, builder_spec, got_revision, |
| 389 infrabots_dir, extra_hashes) | 480 infrabots_dir, extra_hashes) |
| 390 if do_perf_steps: | 481 if do_perf_steps: |
| 391 perf_task = perf_steps_trigger(api, builder_spec, got_revision, | 482 perf_task = perf_steps_trigger(api, builder_spec, got_revision, |
| 392 infrabots_dir, extra_hashes) | 483 infrabots_dir, extra_hashes) |
| 393 is_trybot = builder_cfg['is_trybot'] | 484 is_trybot = builder_cfg['is_trybot'] |
| 394 if test_task: | 485 if test_task: |
| 395 test_steps_collect(api, test_task, builder_spec['upload_dm_results'], | 486 test_steps_collect(api, test_task, builder_spec['upload_dm_results'], |
| 396 got_revision, is_trybot) | 487 got_revision, is_trybot, builder_cfg) |
| 397 if perf_task: | 488 if perf_task: |
| 398 perf_steps_collect(api, perf_task, builder_spec['upload_perf_results'], | 489 perf_steps_collect(api, perf_task, builder_spec['upload_perf_results'], |
| 399 got_revision, is_trybot) | 490 got_revision, is_trybot) |
| 400 | 491 |
| 401 | 492 |
| 402 def test_for_bot(api, builder, mastername, slavename, testname=None): | 493 def test_for_bot(api, builder, mastername, slavename, testname=None): |
| 403 """Generate a test for the given bot.""" | 494 """Generate a test for the given bot.""" |
| 404 testname = testname or builder | 495 testname = testname or builder |
| 405 test = ( | 496 test = ( |
| 406 api.test(testname) + | 497 api.test(testname) + |
| 407 api.properties(buildername=builder, | 498 api.properties(buildername=builder, |
| 408 mastername=mastername, | 499 mastername=mastername, |
| 409 slavename=slavename, | 500 slavename=slavename, |
| 410 buildnumber=5, | 501 buildnumber=5, |
| 411 revision='abc123') + | 502 revision='abc123') + |
| 412 api.path.exists( | 503 api.path.exists( |
| 413 api.path['slave_build'].join('skia'), | 504 api.path['slave_build'].join('skia'), |
| 414 api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt') | 505 api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt') |
| 415 ) | 506 ) |
| 416 ) | 507 ) |
| 417 if 'Trybot' in builder: | 508 if 'Trybot' in builder: |
| 418 test += api.properties(issue=500, | 509 test += api.properties(issue=500, |
| 419 patchset=1, | 510 patchset=1, |
| 420 rietveld='https://codereview.chromium.org') | 511 rietveld='https://codereview.chromium.org') |
| 421 test += api.step_data( | 512 if 'Coverage' not in builder: |
| 422 'upload new .isolated file for compile_skia', | 513 test += api.step_data( |
| 423 stdout=api.raw_io.output('def456 XYZ.isolated')) | 514 'upload new .isolated file for compile_skia', |
| 515 stdout=api.raw_io.output('def456 XYZ.isolated')) |
| 424 if 'Test' in builder: | 516 if 'Test' in builder: |
| 425 test += api.step_data( | 517 test += api.step_data( |
| 426 'upload new .isolated file for test_skia', | 518 'upload new .isolated file for test_skia', |
| 427 stdout=api.raw_io.output('def456 XYZ.isolated')) | 519 stdout=api.raw_io.output('def456 XYZ.isolated')) |
| 428 if ('Test' in builder and 'Debug' in builder) or 'Perf' in builder or ( | 520 if ('Test' in builder and 'Debug' in builder) or 'Perf' in builder or ( |
| 429 'Valgrind' in builder and 'Test' in builder): | 521 'Valgrind' in builder and 'Test' in builder): |
| 430 test += api.step_data( | 522 test += api.step_data( |
| 431 'upload new .isolated file for perf_skia', | 523 'upload new .isolated file for perf_skia', |
| 432 stdout=api.raw_io.output('def456 XYZ.isolated')) | 524 stdout=api.raw_io.output('def456 XYZ.isolated')) |
| 433 | 525 |
| 434 return test | 526 return test |
| 435 | 527 |
| 436 | 528 |
| 437 def GenTests(api): | 529 def GenTests(api): |
| 438 for mastername, slaves in TEST_BUILDERS.iteritems(): | 530 for mastername, slaves in TEST_BUILDERS.iteritems(): |
| 439 for slavename, builders_by_slave in slaves.iteritems(): | 531 for slavename, builders_by_slave in slaves.iteritems(): |
| 440 for builder in builders_by_slave: | 532 for builder in builders_by_slave: |
| 441 yield test_for_bot(api, builder, mastername, slavename) | 533 yield test_for_bot(api, builder, mastername, slavename) |
| 442 | 534 |
| 443 builder = 'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug-Swarming' | 535 builder = 'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug-Swarming' |
| 444 master = 'client.skia' | 536 master = 'client.skia' |
| 445 slave = 'skiabot-linux-test-000' | 537 slave = 'skiabot-linux-test-000' |
| 446 test = test_for_bot(api, builder, master, slave, 'No_downloaded_SKP_VERSION') | 538 test = test_for_bot(api, builder, master, slave, 'No_downloaded_SKP_VERSION') |
| 447 test += api.step_data('Get downloaded SKP_VERSION', retcode=1) | 539 test += api.step_data('Get downloaded SKP_VERSION', retcode=1) |
| 448 test += api.path.exists( | 540 test += api.path.exists( |
| 449 api.path['slave_build'].join('skia'), | 541 api.path['slave_build'].join('skia'), |
| 450 api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt') | 542 api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt') |
| 451 ) | 543 ) |
| 452 yield test | 544 yield test |