Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 # Copyright 2016 The Chromium Authors. All rights reserved. | 1 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 | 5 |
| 6 # Recipe module for Skia Swarming trigger. | 6 # Recipe module for Skia Swarming trigger. |
| 7 | 7 |
| 8 | 8 |
| 9 import os | 9 import os |
| 10 import json | 10 import json |
| (...skipping 175 matching lines...) | |
| 186 properties = { | 186 properties = { |
| 187 'buildername': builder, | 187 'buildername': builder, |
| 188 'mastername': master, | 188 'mastername': master, |
| 189 'buildnumber': buildnumber, | 189 'buildnumber': buildnumber, |
| 190 'reason': 'Triggered by Skia swarm_trigger Recipe', | 190 'reason': 'Triggered by Skia swarm_trigger Recipe', |
| 191 'revision': got_revision, | 191 'revision': got_revision, |
| 192 'slavename': slave, | 192 'slavename': slave, |
| 193 'swarm_out_dir': '${ISOLATED_OUTDIR}', | 193 'swarm_out_dir': '${ISOLATED_OUTDIR}', |
| 194 } | 194 } |
| 195 if builder_cfg['is_trybot']: | 195 if builder_cfg['is_trybot']: |
| 196 properties['issue'] = str(api.properties['issue']) | 196 if api.properties.get('patch_storage') == 'gerrit': |
| 197 properties['patchset'] = str(api.properties['patchset']) | 197 properties['patch_storage'] = api.properties['patch_storage'] |
| 198 properties['rietveld'] = api.properties['rietveld'] | 198 properties['repository'] = api.properties['repository'] |
| 199 properties['event.patchSet.ref'] = api.properties['event.patchSet.ref'] | |
| 200 properties['event.change.number'] = api.properties['event.change.number'] | |
| 201 else: | |
| 202 properties['issue'] = str(api.properties['issue']) | |
| 203 properties['patchset'] = str(api.properties['patchset']) | |
| 204 properties['rietveld'] = api.properties['rietveld'] | |
| 199 | 205 |
| 200 extra_args = [ | 206 extra_args = [ |
| 201 '--workdir', '../../..', | 207 '--workdir', '../../..', |
| 202 'swarm_%s' % task_name, | 208 'swarm_%s' % task_name, |
| 203 ] | 209 ] |
| 204 for k, v in properties.iteritems(): | 210 for k, v in properties.iteritems(): |
| 205 extra_args.append('%s=%s' % (k, v)) | 211 extra_args.append('%s=%s' % (k, v)) |
| 206 | 212 |
| 207 isolate_base_dir = api.path['slave_build'] | 213 isolate_base_dir = api.path['slave_build'] |
| 208 dimensions = swarm_dimensions(builder_cfg) | 214 dimensions = swarm_dimensions(builder_cfg) |
| (...skipping 202 matching lines...) | |
| 411 if api.path._test_data.enabled: | 417 if api.path._test_data.enabled: |
| 412 home_dir = '[HOME]' | 418 home_dir = '[HOME]' |
| 413 | 419 |
| 414 boto_path = None | 420 boto_path = None |
| 415 if boto_file: | 421 if boto_file: |
| 416 boto_path = api.path.join(home_dir, boto_file) | 422 boto_path = api.path.join(home_dir, boto_file) |
| 417 return {'AWS_CREDENTIAL_FILE': boto_path, | 423 return {'AWS_CREDENTIAL_FILE': boto_path, |
| 418 'BOTO_CONFIG': boto_path} | 424 'BOTO_CONFIG': boto_path} |
| 419 | 425 |
| 420 | 426 |
| 427 def get_issue_num(api): | |
| 428 if api.properties.get('patch_storage') == 'gerrit': | |
| 429 return str(api.properties['event.change.number']) | |
| 430 else: | |
| 431 return str(api.properties['issue']) | |
| 432 | |
| 433 | |
| 421 def perf_steps_trigger(api, builder_cfg, got_revision, infrabots_dir, | 434 def perf_steps_trigger(api, builder_cfg, got_revision, infrabots_dir, |
| 422 extra_hashes, cipd_packages): | 435 extra_hashes, cipd_packages): |
| 423 """Trigger perf tests via Swarming.""" | 436 """Trigger perf tests via Swarming.""" |
| 424 | 437 |
| 425 expiration, hard_timeout, io_timeout = get_timeouts(builder_cfg) | 438 expiration, hard_timeout, io_timeout = get_timeouts(builder_cfg) |
| 426 return trigger_task( | 439 return trigger_task( |
| 427 api, | 440 api, |
| 428 'perf', | 441 'perf', |
| 429 api.properties['buildername'], | 442 api.properties['buildername'], |
| 430 api.properties['mastername'], | 443 api.properties['mastername'], |
| (...skipping 30 matching lines...) | |
| 461 'nanobench_%s_%s.json' % (got_revision, git_timestamp)) | 474 'nanobench_%s_%s.json' % (got_revision, git_timestamp)) |
| 462 api.file.copy('perf_results', src_results_file, dst_results_file, | 475 api.file.copy('perf_results', src_results_file, dst_results_file, |
| 463 infra_step=True) | 476 infra_step=True) |
| 464 | 477 |
| 465 gsutil_path = api.path['slave_build'].join( | 478 gsutil_path = api.path['slave_build'].join( |
| 466 'skia', 'infra', 'bots', '.recipe_deps', 'depot_tools', 'third_party', | 479 'skia', 'infra', 'bots', '.recipe_deps', 'depot_tools', 'third_party', |
| 467 'gsutil', 'gsutil') | 480 'gsutil', 'gsutil') |
| 468 upload_args = [api.properties['buildername'], api.properties['buildnumber'], | 481 upload_args = [api.properties['buildername'], api.properties['buildnumber'], |
| 469 perf_data_dir, got_revision, gsutil_path] | 482 perf_data_dir, got_revision, gsutil_path] |
| 470 if is_trybot: | 483 if is_trybot: |
| 471 upload_args.append(api.properties['issue']) | 484 upload_args.append(get_issue_num(api)) |
**borenet** (2016/08/23 18:45:33): I'm just realizing that it's weird we don't include […]

**rmistry** (2016/08/23 19:04:37): We do include the patchset in the DM and nanobench […]

**borenet** (2016/08/23 19:06:29): Gotcha.
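For readers following the thread: the identifier appended to the trybot upload args after this change comes from the new `get_issue_num` helper shown earlier in the diff. The following is a standalone sketch of that selection logic, not part of the CL itself; the Gerrit property values mirror the `gerrit_kwargs` in the new test case below, while the Rietveld values are purely illustrative.

```python
# Sketch (not part of the CL) of the issue-number selection that now feeds
# the trybot upload steps. 'props' stands in for api.properties.
def get_issue_num(props):
  # Gerrit trybots are identified by their change number; Rietveld trybots
  # keep using the legacy 'issue' property.
  if props.get('patch_storage') == 'gerrit':
    return str(props['event.change.number'])
  return str(props['issue'])

# Gerrit values taken from the gerrit_kwargs in the new GenTests case.
gerrit_props = {
  'patch_storage': 'gerrit',
  'repository': 'skia',
  'event.patchSet.ref': 'refs/changes/00/2100/2',
  'event.change.number': '2100',
}
# Rietveld values are illustrative only.
rietveld_props = {'issue': 1234, 'patchset': 2,
                  'rietveld': 'https://codereview.chromium.org'}

assert get_issue_num(gerrit_props) == '2100'
assert get_issue_num(rietveld_props) == '1234'
```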
| 472 api.python( | 485 api.python( |
| 473 'Upload perf results', | 486 'Upload perf results', |
| 474 script=api.core.resource('upload_bench_results.py'), | 487 script=api.core.resource('upload_bench_results.py'), |
| 475 args=upload_args, | 488 args=upload_args, |
| 476 cwd=api.path['checkout'], | 489 cwd=api.path['checkout'], |
| 477 infra_step=True) | 490 infra_step=True) |
| 478 | 491 |
| 479 | 492 |
| 480 def test_steps_trigger(api, builder_cfg, got_revision, infrabots_dir, | 493 def test_steps_trigger(api, builder_cfg, got_revision, infrabots_dir, |
| 481 extra_hashes, cipd_packages): | 494 extra_hashes, cipd_packages): |
| (...skipping 31 matching lines...) | |
| 513 | 526 |
| 514 # Upload them to Google Storage. | 527 # Upload them to Google Storage. |
| 515 api.python( | 528 api.python( |
| 516 'Upload DM Results', | 529 'Upload DM Results', |
| 517 script=api.core.resource('upload_dm_results.py'), | 530 script=api.core.resource('upload_dm_results.py'), |
| 518 args=[ | 531 args=[ |
| 519 dm_dir, | 532 dm_dir, |
| 520 got_revision, | 533 got_revision, |
| 521 api.properties['buildername'], | 534 api.properties['buildername'], |
| 522 api.properties['buildnumber'], | 535 api.properties['buildnumber'], |
| 523 api.properties['issue'] if is_trybot else '', | 536 get_issue_num(api) if is_trybot else '', |
| 524 api.path['slave_build'].join('skia', 'common', 'py', 'utils'), | 537 api.path['slave_build'].join('skia', 'common', 'py', 'utils'), |
| 525 ], | 538 ], |
| 526 cwd=api.path['checkout'], | 539 cwd=api.path['checkout'], |
| 527 env=gsutil_env(api, 'chromium-skia-gm.boto'), | 540 env=gsutil_env(api, 'chromium-skia-gm.boto'), |
| 528 infra_step=True) | 541 infra_step=True) |
| 529 | 542 |
| 530 if builder_cfg['configuration'] == 'Coverage': | 543 if builder_cfg['configuration'] == 'Coverage': |
| 531 upload_coverage_results(api, task, got_revision, is_trybot) | 544 upload_coverage_results(api, task, got_revision, is_trybot) |
| 532 | 545 |
| 533 | 546 |
| 534 def upload_coverage_results(api, task, got_revision, is_trybot): | 547 def upload_coverage_results(api, task, got_revision, is_trybot): |
| 535 results_dir = task.task_output_dir.join('0') | 548 results_dir = task.task_output_dir.join('0') |
| 536 git_timestamp = api.git.get_timestamp(test_data='1408633190', | 549 git_timestamp = api.git.get_timestamp(test_data='1408633190', |
| 537 infra_step=True) | 550 infra_step=True) |
| 538 | 551 |
| 539 # Upload raw coverage data. | 552 # Upload raw coverage data. |
| 540 cov_file_basename = '%s.cov' % got_revision | 553 cov_file_basename = '%s.cov' % got_revision |
| 541 cov_file = results_dir.join(cov_file_basename) | 554 cov_file = results_dir.join(cov_file_basename) |
| 542 now = api.time.utcnow() | 555 now = api.time.utcnow() |
| 543 gs_json_path = '/'.join(( | 556 gs_json_path = '/'.join(( |
| 544 str(now.year).zfill(4), str(now.month).zfill(2), | 557 str(now.year).zfill(4), str(now.month).zfill(2), |
| 545 str(now.day).zfill(2), str(now.hour).zfill(2), | 558 str(now.day).zfill(2), str(now.hour).zfill(2), |
| 546 api.properties['buildername'], | 559 api.properties['buildername'], |
| 547 str(api.properties['buildnumber']))) | 560 str(api.properties['buildnumber']))) |
| 548 if is_trybot: | 561 if is_trybot: |
| 549 gs_json_path = '/'.join(('trybot', gs_json_path, | 562 gs_json_path = '/'.join(('trybot', gs_json_path, get_issue_num(api))) |
| 550 str(api.properties['issue']))) | |
| 551 api.gsutil.upload( | 563 api.gsutil.upload( |
| 552 name='upload raw coverage data', | 564 name='upload raw coverage data', |
| 553 source=cov_file, | 565 source=cov_file, |
| 554 bucket='skia-infra', | 566 bucket='skia-infra', |
| 555 dest='/'.join(('coverage-raw-v1', gs_json_path, | 567 dest='/'.join(('coverage-raw-v1', gs_json_path, |
| 556 cov_file_basename)), | 568 cov_file_basename)), |
| 557 env={'AWS_CREDENTIAL_FILE': None, 'BOTO_CONFIG': None}, | 569 env={'AWS_CREDENTIAL_FILE': None, 'BOTO_CONFIG': None}, |
| 558 ) | 570 ) |
| 559 | 571 |
| 560 # Transform the nanobench_${git_hash}.json file received from swarming bot | 572 # Transform the nanobench_${git_hash}.json file received from swarming bot |
| 561 # into the nanobench_${git_hash}_${timestamp}.json file | 573 # into the nanobench_${git_hash}_${timestamp}.json file |
| 562 # upload_bench_results.py expects. | 574 # upload_bench_results.py expects. |
| 563 src_nano_file = results_dir.join('nanobench_%s.json' % got_revision) | 575 src_nano_file = results_dir.join('nanobench_%s.json' % got_revision) |
| 564 dst_nano_file = results_dir.join( | 576 dst_nano_file = results_dir.join( |
| 565 'nanobench_%s_%s.json' % (got_revision, git_timestamp)) | 577 'nanobench_%s_%s.json' % (got_revision, git_timestamp)) |
| 566 api.file.copy('nanobench JSON', src_nano_file, dst_nano_file, | 578 api.file.copy('nanobench JSON', src_nano_file, dst_nano_file, |
| 567 infra_step=True) | 579 infra_step=True) |
| 568 api.file.remove('old nanobench JSON', src_nano_file) | 580 api.file.remove('old nanobench JSON', src_nano_file) |
| 569 | 581 |
| 570 # Upload nanobench JSON data. | 582 # Upload nanobench JSON data. |
| 571 gsutil_path = api.depot_tools.gsutil_py_path | 583 gsutil_path = api.depot_tools.gsutil_py_path |
| 572 upload_args = [api.properties['buildername'], api.properties['buildnumber'], | 584 upload_args = [api.properties['buildername'], api.properties['buildnumber'], |
| 573 results_dir, got_revision, gsutil_path] | 585 results_dir, got_revision, gsutil_path] |
| 574 if is_trybot: | 586 if is_trybot: |
| 575 upload_args.append(api.properties['issue']) | 587 upload_args.append(get_issue_num(api)) |
| 576 api.python( | 588 api.python( |
| 577 'upload nanobench coverage results', | 589 'upload nanobench coverage results', |
| 578 script=api.core.resource('upload_bench_results.py'), | 590 script=api.core.resource('upload_bench_results.py'), |
| 579 args=upload_args, | 591 args=upload_args, |
| 580 cwd=api.path['checkout'], | 592 cwd=api.path['checkout'], |
| 581 env=gsutil_env(api, 'chromium-skia-gm.boto'), | 593 env=gsutil_env(api, 'chromium-skia-gm.boto'), |
| 582 infra_step=True) | 594 infra_step=True) |
| 583 | 595 |
| 584 # Transform the coverage_by_line_${git_hash}.json file received from | 596 # Transform the coverage_by_line_${git_hash}.json file received from |
| 585 # swarming bot into a coverage_by_line_${git_hash}_${timestamp}.json file. | 597 # swarming bot into a coverage_by_line_${git_hash}_${timestamp}.json file. |
| (...skipping 180 matching lines...) | |
| 766 test += api.path.exists(*paths) | 778 test += api.path.exists(*paths) |
| 767 | 779 |
| 768 return test | 780 return test |
| 769 | 781 |
| 770 | 782 |
| 771 def GenTests(api): | 783 def GenTests(api): |
| 772 for mastername, slaves in TEST_BUILDERS.iteritems(): | 784 for mastername, slaves in TEST_BUILDERS.iteritems(): |
| 773 for slavename, builders_by_slave in slaves.iteritems(): | 785 for slavename, builders_by_slave in slaves.iteritems(): |
| 774 for builder in builders_by_slave: | 786 for builder in builders_by_slave: |
| 775 yield test_for_bot(api, builder, mastername, slavename) | 787 yield test_for_bot(api, builder, mastername, slavename) |
| 788 | |
| 789 gerrit_kwargs = { | |
| 790 'patch_storage': 'gerrit', | |
| 791 'repository': 'skia', | |
| 792 'event.patchSet.ref': 'refs/changes/00/2100/2', | |
| 793 'event.change.number': '2100', | |
| 794 } | |
| 795 yield ( | |
| 796 api.test('recipe_with_gerrit_patch') + | |
| 797 api.properties( | |
| 798 buildername='Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug-Trybot', | |
| 799 mastername='client.skia', | |
| 800 slavename='skiabot-linux-swarm-000', | |
| 801 buildnumber=5, | |
| 802 path_config='kitchen', | |
| 803 revision='abc123', | |
| 804 **gerrit_kwargs) + | |
| 805 api.step_data( | |
| 806 'upload new .isolated file for test_skia', | |
| 807 stdout=api.raw_io.output('def456 XYZ.isolated')) | |
| 808 ) | |
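For orientation only, here is a sketch (not part of the CL) of the property list the trigger step would hand to a Gerrit trybot task, using the values from the new `recipe_with_gerrit_patch` test case. The `swarm_perf` task name and the use of `revision` as `got_revision` are assumptions based on the visible parts of the diff; the recipe itself iterates with `iteritems()` rather than `sorted(...)`.

```python
# Sketch of the extra_args built by the trigger step for a Gerrit trybot.
properties = {
  'buildername': 'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug-Trybot',
  'mastername': 'client.skia',
  'buildnumber': 5,
  'reason': 'Triggered by Skia swarm_trigger Recipe',
  'revision': 'abc123',                    # assumed got_revision for the test
  'slavename': 'skiabot-linux-swarm-000',
  'swarm_out_dir': '${ISOLATED_OUTDIR}',
  # Keys added for Gerrit trybots by this CL (no 'issue'/'patchset'/'rietveld'):
  'patch_storage': 'gerrit',
  'repository': 'skia',
  'event.patchSet.ref': 'refs/changes/00/2100/2',
  'event.change.number': '2100',
}

extra_args = ['--workdir', '../../..', 'swarm_perf']  # task name assumed
for k, v in sorted(properties.items()):
  extra_args.append('%s=%s' % (k, v))
```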