| OLD | NEW |
| 1 # Copyright 2016 The Chromium Authors. All rights reserved. | 1 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 | 5 |
| 6 # Recipe module for Skia Swarming trigger. | 6 # Recipe module for Skia Swarming trigger. |
| 7 | 7 |
| 8 | 8 |
| 9 import json | 9 import json |
| 10 | 10 |
| 11 | 11 |
| 12 DEPS = [ | 12 DEPS = [ |
| 13 'core', |
| 13 'build/file', | 14 'build/file', |
| 14 'build/gsutil', | 15 'build/gsutil', |
| 15 'depot_tools/depot_tools', | 16 'depot_tools/depot_tools', |
| 16 'depot_tools/git', | 17 'depot_tools/git', |
| 17 'depot_tools/tryserver', | 18 'depot_tools/tryserver', |
| 18 'recipe_engine/json', | 19 'recipe_engine/json', |
| 19 'recipe_engine/path', | 20 'recipe_engine/path', |
| 20 'recipe_engine/properties', | 21 'recipe_engine/properties', |
| 21 'recipe_engine/python', | 22 'recipe_engine/python', |
| 22 'recipe_engine/raw_io', | 23 'recipe_engine/raw_io', |
| 23 'recipe_engine/step', | 24 'recipe_engine/step', |
| 24 'recipe_engine/time', | 25 'recipe_engine/time', |
| 25 'skia', | 26 'run', |
| 26 'skia_swarming', | 27 'swarming', |
| 28 'vars', |
| 27 ] | 29 ] |
| 28 | 30 |
| 29 | 31 |
| 30 TEST_BUILDERS = { | 32 TEST_BUILDERS = { |
| 31 'client.skia': { | 33 'client.skia': { |
| 32 'skiabot-linux-swarm-000': [ | 34 'skiabot-linux-swarm-000': [ |
| 33 'Test-Ubuntu-GCC-ShuttleA-GPU-GTX550Ti-x86_64-Release-Valgrind', | 35 'Test-Ubuntu-GCC-ShuttleA-GPU-GTX550Ti-x86_64-Release-Valgrind', |
| 34 'Test-Ubuntu-Clang-GCE-CPU-AVX2-x86_64-Coverage-Trybot', | 36 'Test-Ubuntu-Clang-GCE-CPU-AVX2-x86_64-Coverage-Trybot', |
| 35 'Build-Mac-Clang-x86_64-Release', | 37 'Build-Mac-Clang-x86_64-Release', |
| 36 'Build-Ubuntu-GCC-Arm64-Debug-Android_Vulkan', | 38 'Build-Ubuntu-GCC-Arm64-Debug-Android_Vulkan', |
| (...skipping 152 matching lines...) |
| 189 isolate_blacklist = ['.git', 'out', '*.pyc', '.recipe_deps'] | 191 isolate_blacklist = ['.git', 'out', '*.pyc', '.recipe_deps'] |
| 190 isolate_vars = { | 192 isolate_vars = { |
| 191 'WORKDIR': api.path['slave_build'], | 193 'WORKDIR': api.path['slave_build'], |
| 192 } | 194 } |
| 193 | 195 |
| 194 isolate_file = '%s_skia.isolate' % task_name | 196 isolate_file = '%s_skia.isolate' % task_name |
| 195 if 'Coverage' == builder_cfg.get('configuration'): | 197 if 'Coverage' == builder_cfg.get('configuration'): |
| 196 isolate_file = 'coverage_skia.isolate' | 198 isolate_file = 'coverage_skia.isolate' |
| 197 if 'RecreateSKPs' in builder: | 199 if 'RecreateSKPs' in builder: |
| 198 isolate_file = 'compile_skia.isolate' | 200 isolate_file = 'compile_skia.isolate' |
| 199 return api.skia_swarming.isolate_and_trigger_task( | 201 return api.swarming.isolate_and_trigger_task( |
| 200 infrabots_dir.join(isolate_file), | 202 infrabots_dir.join(isolate_file), |
| 201 isolate_base_dir, | 203 isolate_base_dir, |
| 202 '%s_skia' % task_name, | 204 '%s_skia' % task_name, |
| 203 isolate_vars, | 205 isolate_vars, |
| 204 dimensions, | 206 dimensions, |
| 205 isolate_blacklist=isolate_blacklist, | 207 isolate_blacklist=isolate_blacklist, |
| 206 extra_isolate_hashes=extra_isolate_hashes, | 208 extra_isolate_hashes=extra_isolate_hashes, |
| 207 idempotent=idempotent, | 209 idempotent=idempotent, |
| 208 store_output=store_output, | 210 store_output=store_output, |
| 209 extra_args=extra_args, | 211 extra_args=extra_args, |
| (...skipping 14 matching lines...) |
| 224 step_test_data=lambda: api.raw_io.test_api.stream_output('abc123\n'), | 226 step_test_data=lambda: api.raw_io.test_api.stream_output('abc123\n'), |
| 225 ).stdout.rstrip() | 227 ).stdout.rstrip() |
| 226 cmd = ['python', '-c', '"print \'%s\'"' % got_revision] | 228 cmd = ['python', '-c', '"print \'%s\'"' % got_revision] |
| 227 res = api.step('got_revision', cmd=cmd) | 229 res = api.step('got_revision', cmd=cmd) |
| 228 res.presentation.properties['got_revision'] = got_revision | 230 res.presentation.properties['got_revision'] = got_revision |
| 229 api.path['checkout'] = checkout_path | 231 api.path['checkout'] = checkout_path |
| 230 | 232 |
| 231 # Write a fake .gclient file if none exists. This is required by .isolates. | 233 # Write a fake .gclient file if none exists. This is required by .isolates. |
| 232 dot_gclient = api.path['slave_build'].join('.gclient') | 234 dot_gclient = api.path['slave_build'].join('.gclient') |
| 233 if not api.path.exists(dot_gclient): | 235 if not api.path.exists(dot_gclient): |
| 234 api.skia._writefile(dot_gclient, '') | 236 api.run.writefile(dot_gclient, '') |
| 235 | 237 |
| 236 fix_filemodes(api, api.path['checkout']) | 238 fix_filemodes(api, api.path['checkout']) |
| 237 return got_revision | 239 return got_revision |
| 238 | 240 |
| 239 | 241 |
| 240 def housekeeper_swarm(api, builder_spec, got_revision, infrabots_dir, | 242 def housekeeper_swarm(api, builder_spec, got_revision, infrabots_dir, |
| 241 extra_isolate_hashes): | 243 extra_isolate_hashes): |
| 242 task = trigger_task( | 244 task = trigger_task( |
| 243 api, | 245 api, |
| 244 'housekeeper', | 246 'housekeeper', |
| 245 api.properties['buildername'], | 247 api.properties['buildername'], |
| 246 api.properties['mastername'], | 248 api.properties['mastername'], |
| 247 api.properties['slavename'], | 249 api.properties['slavename'], |
| 248 api.properties['buildnumber'], | 250 api.properties['buildnumber'], |
| 249 builder_spec, | 251 builder_spec, |
| 250 got_revision, | 252 got_revision, |
| 251 infrabots_dir, | 253 infrabots_dir, |
| 252 idempotent=False, | 254 idempotent=False, |
| 253 store_output=False, | 255 store_output=False, |
| 254 extra_isolate_hashes=extra_isolate_hashes) | 256 extra_isolate_hashes=extra_isolate_hashes) |
| 255 return api.skia_swarming.collect_swarming_task(task) | 257 return api.swarming.collect_swarming_task(task) |
| 256 | 258 |
| 257 | 259 |
| 258 def recreate_skps_swarm(api, builder_spec, got_revision, infrabots_dir, | 260 def recreate_skps_swarm(api, builder_spec, got_revision, infrabots_dir, |
| 259 extra_isolate_hashes): | 261 extra_isolate_hashes): |
| 260 task = trigger_task( | 262 task = trigger_task( |
| 261 api, | 263 api, |
| 262 'RecreateSKPs', | 264 'RecreateSKPs', |
| 263 api.properties['buildername'], | 265 api.properties['buildername'], |
| 264 api.properties['mastername'], | 266 api.properties['mastername'], |
| 265 api.properties['slavename'], | 267 api.properties['slavename'], |
| 266 api.properties['buildnumber'], | 268 api.properties['buildnumber'], |
| 267 builder_spec, | 269 builder_spec, |
| 268 got_revision, | 270 got_revision, |
| 269 infrabots_dir, | 271 infrabots_dir, |
| 270 idempotent=False, | 272 idempotent=False, |
| 271 store_output=False, | 273 store_output=False, |
| 272 extra_isolate_hashes=extra_isolate_hashes) | 274 extra_isolate_hashes=extra_isolate_hashes) |
| 273 return api.skia_swarming.collect_swarming_task(task) | 275 return api.swarming.collect_swarming_task(task) |
| 274 | 276 |
| 275 | 277 |
| 276 def infra_swarm(api, got_revision, infrabots_dir, extra_isolate_hashes): | 278 def infra_swarm(api, got_revision, infrabots_dir, extra_isolate_hashes): |
| 277 # Fake the builder spec. | 279 # Fake the builder spec. |
| 278 builder_spec = { | 280 builder_spec = { |
| 279 'builder_cfg': { | 281 'builder_cfg': { |
| 280 'role': 'Infra', | 282 'role': 'Infra', |
| 281 'is_trybot': api.properties['buildername'].endswith('-Trybot'), | 283 'is_trybot': api.properties['buildername'].endswith('-Trybot'), |
| 282 } | 284 } |
| 283 } | 285 } |
| 284 task = trigger_task( | 286 task = trigger_task( |
| 285 api, | 287 api, |
| 286 'infra', | 288 'infra', |
| 287 api.properties['buildername'], | 289 api.properties['buildername'], |
| 288 api.properties['mastername'], | 290 api.properties['mastername'], |
| 289 api.properties['slavename'], | 291 api.properties['slavename'], |
| 290 api.properties['buildnumber'], | 292 api.properties['buildnumber'], |
| 291 builder_spec, | 293 builder_spec, |
| 292 got_revision, | 294 got_revision, |
| 293 infrabots_dir, | 295 infrabots_dir, |
| 294 idempotent=False, | 296 idempotent=False, |
| 295 store_output=False, | 297 store_output=False, |
| 296 extra_isolate_hashes=extra_isolate_hashes) | 298 extra_isolate_hashes=extra_isolate_hashes) |
| 297 return api.skia_swarming.collect_swarming_task(task) | 299 return api.swarming.collect_swarming_task(task) |
| 298 | 300 |
| 299 | 301 |
| 300 def compile_steps_swarm(api, builder_spec, got_revision, infrabots_dir, | 302 def compile_steps_swarm(api, builder_spec, got_revision, infrabots_dir, |
| 301 extra_isolate_hashes, cipd_packages): | 303 extra_isolate_hashes, cipd_packages): |
| 302 builder_name = derive_compile_bot_name(api.properties['buildername'], | 304 builder_name = derive_compile_bot_name(api.properties['buildername'], |
| 303 builder_spec) | 305 builder_spec) |
| 304 compile_builder_spec = builder_spec | 306 compile_builder_spec = builder_spec |
| 305 if builder_name != api.properties['buildername']: | 307 if builder_name != api.properties['buildername']: |
| 306 compile_builder_spec = api.skia.get_builder_spec( | 308 compile_builder_spec = api.core.get_builder_spec( |
| 307 api.path['slave_build'].join('skia'), builder_name) | 309 api.path['slave_build'].join('skia'), builder_name) |
| 308 | 310 |
| 309 extra_hashes = extra_isolate_hashes[:] | 311 extra_hashes = extra_isolate_hashes[:] |
| 310 | 312 |
| 311 # Windows bots require a toolchain. | 313 # Windows bots require a toolchain. |
| 312 if 'Win' in builder_name: | 314 if 'Win' in builder_name: |
| 313 version_file = infrabots_dir.join('assets', 'win_toolchain', 'VERSION') | 315 version_file = infrabots_dir.join('assets', 'win_toolchain', 'VERSION') |
| 314 version = api.skia._readfile(version_file, | 316 version = api.run.readfile(version_file, |
| 315 name='read win_toolchain VERSION', | 317 name='read win_toolchain VERSION', |
| 316 test_data='0').rstrip() | 318 test_data='0').rstrip() |
| 317 version = 'version:%s' % version | 319 version = 'version:%s' % version |
| 318 pkg = ('t', 'skia/bots/win_toolchain', version) | 320 pkg = ('t', 'skia/bots/win_toolchain', version) |
| 319 cipd_packages.append(pkg) | 321 cipd_packages.append(pkg) |
| 320 | 322 |
| 321 if 'Vulkan' in builder_name: | 323 if 'Vulkan' in builder_name: |
| 322 cipd_packages.append(cipd_pkg(api, infrabots_dir, 'win_vulkan_sdk')) | 324 cipd_packages.append(cipd_pkg(api, infrabots_dir, 'win_vulkan_sdk')) |
| 323 | 325 |
| 324 # Fake these properties for compile tasks so that they can be de-duped. | 326 # Fake these properties for compile tasks so that they can be de-duped. |
| (...skipping 10 matching lines...) |
| 335 buildnumber, | 337 buildnumber, |
| 336 compile_builder_spec, | 338 compile_builder_spec, |
| 337 got_revision, | 339 got_revision, |
| 338 infrabots_dir, | 340 infrabots_dir, |
| 339 idempotent=True, | 341 idempotent=True, |
| 340 store_output=False, | 342 store_output=False, |
| 341 extra_isolate_hashes=extra_hashes, | 343 extra_isolate_hashes=extra_hashes, |
| 342 cipd_packages=cipd_packages) | 344 cipd_packages=cipd_packages) |
| 343 | 345 |
| 344 # Wait for compile to finish, record the results hash. | 346 # Wait for compile to finish, record the results hash. |
| 345 return api.skia_swarming.collect_swarming_task_isolate_hash(task) | 347 return api.swarming.collect_swarming_task_isolate_hash(task) |
| 346 | 348 |
| 347 | 349 |
| 348 def get_timeouts(builder_cfg): | 350 def get_timeouts(builder_cfg): |
| 349 """Some builders require longer than the default timeouts. | 351 """Some builders require longer than the default timeouts. |
| 350 | 352 |
| 351 Returns tuple of (expiration, hard_timeout, io_timeout). If those values are | 353 Returns tuple of (expiration, hard_timeout, io_timeout). If those values are |
| 352 none then default timeouts should be used. | 354 none then default timeouts should be used. |
| 353 """ | 355 """ |
| 354 expiration = None | 356 expiration = None |
| 355 hard_timeout = None | 357 hard_timeout = None |
| (...skipping 25 matching lines...) |
| 381 expiration=expiration, | 383 expiration=expiration, |
| 382 hard_timeout=hard_timeout, | 384 hard_timeout=hard_timeout, |
| 383 io_timeout=io_timeout, | 385 io_timeout=io_timeout, |
| 384 cipd_packages=cipd_packages) | 386 cipd_packages=cipd_packages) |
| 385 | 387 |
| 386 | 388 |
| 387 def perf_steps_collect(api, task, upload_perf_results, got_revision, | 389 def perf_steps_collect(api, task, upload_perf_results, got_revision, |
| 388 is_trybot): | 390 is_trybot): |
| 389 """Wait for perf steps to finish and upload results.""" | 391 """Wait for perf steps to finish and upload results.""" |
| 390 # Wait for nanobench to finish, download the results. | 392 # Wait for nanobench to finish, download the results. |
| 391 api.skia.rmtree(task.task_output_dir) | 393 api.run.rmtree(task.task_output_dir) |
| 392 api.skia_swarming.collect_swarming_task(task) | 394 api.swarming.collect_swarming_task(task) |
| 393 | 395 |
| 394 # Upload the results. | 396 # Upload the results. |
| 395 if upload_perf_results: | 397 if upload_perf_results: |
| 396 perf_data_dir = api.path['slave_build'].join( | 398 perf_data_dir = api.path['slave_build'].join( |
| 397 'perfdata', api.properties['buildername'], 'data') | 399 'perfdata', api.properties['buildername'], 'data') |
| 398 git_timestamp = api.git.get_timestamp(test_data='1408633190', | 400 git_timestamp = api.git.get_timestamp(test_data='1408633190', |
| 399 infra_step=True) | 401 infra_step=True) |
| 400 api.skia.rmtree(perf_data_dir) | 402 api.run.rmtree(perf_data_dir) |
| 401 api.file.makedirs('perf_dir', perf_data_dir, infra_step=True) | 403 api.file.makedirs('perf_dir', perf_data_dir, infra_step=True) |
| 402 src_results_file = task.task_output_dir.join( | 404 src_results_file = task.task_output_dir.join( |
| 403 '0', 'perfdata', api.properties['buildername'], 'data', | 405 '0', 'perfdata', api.properties['buildername'], 'data', |
| 404 'nanobench_%s.json' % got_revision) | 406 'nanobench_%s.json' % got_revision) |
| 405 dst_results_file = perf_data_dir.join( | 407 dst_results_file = perf_data_dir.join( |
| 406 'nanobench_%s_%s.json' % (got_revision, git_timestamp)) | 408 'nanobench_%s_%s.json' % (got_revision, git_timestamp)) |
| 407 api.file.copy('perf_results', src_results_file, dst_results_file, | 409 api.file.copy('perf_results', src_results_file, dst_results_file, |
| 408 infra_step=True) | 410 infra_step=True) |
| 409 | 411 |
| 410 gsutil_path = api.path['slave_build'].join( | 412 gsutil_path = api.path['slave_build'].join( |
| 411 'skia', 'infra', 'bots', '.recipe_deps', 'depot_tools', 'third_party', | 413 'skia', 'infra', 'bots', '.recipe_deps', 'depot_tools', 'third_party', |
| 412 'gsutil', 'gsutil') | 414 'gsutil', 'gsutil') |
| 413 upload_args = [api.properties['buildername'], api.properties['buildnumber'], | 415 upload_args = [api.properties['buildername'], api.properties['buildnumber'], |
| 414 perf_data_dir, got_revision, gsutil_path] | 416 perf_data_dir, got_revision, gsutil_path] |
| 415 if is_trybot: | 417 if is_trybot: |
| 416 upload_args.append(api.properties['issue']) | 418 upload_args.append(api.properties['issue']) |
| 417 api.python( | 419 api.python( |
| 418 'Upload perf results', | 420 'Upload perf results', |
| 419 script=api.skia.resource('upload_bench_results.py'), | 421 script=api.core.resource('upload_bench_results.py'), |
| 420 args=upload_args, | 422 args=upload_args, |
| 421 cwd=api.path['checkout'], | 423 cwd=api.path['checkout'], |
| 422 infra_step=True) | 424 infra_step=True) |
| 423 | 425 |
| 424 | 426 |
| 425 def test_steps_trigger(api, builder_spec, got_revision, infrabots_dir, | 427 def test_steps_trigger(api, builder_spec, got_revision, infrabots_dir, |
| 426 extra_hashes, cipd_packages): | 428 extra_hashes, cipd_packages): |
| 427 """Trigger DM via Swarming.""" | 429 """Trigger DM via Swarming.""" |
| 428 expiration, hard_timeout, io_timeout = get_timeouts( | 430 expiration, hard_timeout, io_timeout = get_timeouts( |
| 429 builder_spec['builder_cfg']) | 431 builder_spec['builder_cfg']) |
| (...skipping 11 matching lines...) |
| 441 expiration=expiration, | 443 expiration=expiration, |
| 442 hard_timeout=hard_timeout, | 444 hard_timeout=hard_timeout, |
| 443 io_timeout=io_timeout, | 445 io_timeout=io_timeout, |
| 444 cipd_packages=cipd_packages) | 446 cipd_packages=cipd_packages) |
| 445 | 447 |
| 446 | 448 |
| 447 def test_steps_collect(api, task, upload_dm_results, got_revision, is_trybot, | 449 def test_steps_collect(api, task, upload_dm_results, got_revision, is_trybot, |
| 448 builder_cfg): | 450 builder_cfg): |
| 449 """Collect the test results from Swarming.""" | 451 """Collect the test results from Swarming.""" |
| 450 # Wait for tests to finish, download the results. | 452 # Wait for tests to finish, download the results. |
| 451 api.skia.rmtree(task.task_output_dir) | 453 api.run.rmtree(task.task_output_dir) |
| 452 api.skia_swarming.collect_swarming_task(task) | 454 api.swarming.collect_swarming_task(task) |
| 453 | 455 |
| 454 # Upload the results. | 456 # Upload the results. |
| 455 if upload_dm_results: | 457 if upload_dm_results: |
| 456 dm_dir = api.path['slave_build'].join('dm') | 458 dm_dir = api.path['slave_build'].join('dm') |
| 457 dm_src = task.task_output_dir.join('0', 'dm') | 459 dm_src = task.task_output_dir.join('0', 'dm') |
| 458 api.skia.rmtree(dm_dir) | 460 api.run.rmtree(dm_dir) |
| 459 api.file.copytree('dm_dir', dm_src, dm_dir, infra_step=True) | 461 api.file.copytree('dm_dir', dm_src, dm_dir, infra_step=True) |
| 460 | 462 |
| 461 # Upload them to Google Storage. | 463 # Upload them to Google Storage. |
| 462 api.python( | 464 api.python( |
| 463 'Upload DM Results', | 465 'Upload DM Results', |
| 464 script=api.skia.resource('upload_dm_results.py'), | 466 script=api.core.resource('upload_dm_results.py'), |
| 465 args=[ | 467 args=[ |
| 466 dm_dir, | 468 dm_dir, |
| 467 got_revision, | 469 got_revision, |
| 468 api.properties['buildername'], | 470 api.properties['buildername'], |
| 469 api.properties['buildnumber'], | 471 api.properties['buildnumber'], |
| 470 api.properties['issue'] if is_trybot else '', | 472 api.properties['issue'] if is_trybot else '', |
| 471 api.path['slave_build'].join('skia', 'common', 'py', 'utils'), | 473 api.path['slave_build'].join('skia', 'common', 'py', 'utils'), |
| 472 ], | 474 ], |
| 473 cwd=api.path['checkout'], | 475 cwd=api.path['checkout'], |
| 474 env=api.skia.gsutil_env('chromium-skia-gm.boto'), | 476 env=api.vars.gsutil_env('chromium-skia-gm.boto'), |
| 475 infra_step=True) | 477 infra_step=True) |
| 476 | 478 |
| 477 if builder_cfg['configuration'] == 'Coverage': | 479 if builder_cfg['configuration'] == 'Coverage': |
| 478 upload_coverage_results(api, task, got_revision, is_trybot) | 480 upload_coverage_results(api, task, got_revision, is_trybot) |
| 479 | 481 |
| 480 | 482 |
| 481 def upload_coverage_results(api, task, got_revision, is_trybot): | 483 def upload_coverage_results(api, task, got_revision, is_trybot): |
| 482 results_dir = task.task_output_dir.join('0') | 484 results_dir = task.task_output_dir.join('0') |
| 483 git_timestamp = api.git.get_timestamp(test_data='1408633190', | 485 git_timestamp = api.git.get_timestamp(test_data='1408633190', |
| 484 infra_step=True) | 486 infra_step=True) |
| (...skipping 30 matching lines...) |
| 515 api.file.remove('old nanobench JSON', src_nano_file) | 517 api.file.remove('old nanobench JSON', src_nano_file) |
| 516 | 518 |
| 517 # Upload nanobench JSON data. | 519 # Upload nanobench JSON data. |
| 518 gsutil_path = api.depot_tools.gsutil_py_path | 520 gsutil_path = api.depot_tools.gsutil_py_path |
| 519 upload_args = [api.properties['buildername'], api.properties['buildnumber'], | 521 upload_args = [api.properties['buildername'], api.properties['buildnumber'], |
| 520 results_dir, got_revision, gsutil_path] | 522 results_dir, got_revision, gsutil_path] |
| 521 if is_trybot: | 523 if is_trybot: |
| 522 upload_args.append(api.properties['issue']) | 524 upload_args.append(api.properties['issue']) |
| 523 api.python( | 525 api.python( |
| 524 'upload nanobench coverage results', | 526 'upload nanobench coverage results', |
| 525 script=api.skia.resource('upload_bench_results.py'), | 527 script=api.core.resource('upload_bench_results.py'), |
| 526 args=upload_args, | 528 args=upload_args, |
| 527 cwd=api.path['checkout'], | 529 cwd=api.path['checkout'], |
| 528 env=api.skia.gsutil_env('chromium-skia-gm.boto'), | 530 env=api.vars.gsutil_env('chromium-skia-gm.boto'), |
| 529 infra_step=True) | 531 infra_step=True) |
| 530 | 532 |
| 531 # Transform the coverage_by_line_${git_hash}.json file received from | 533 # Transform the coverage_by_line_${git_hash}.json file received from |
| 532 # swarming bot into a coverage_by_line_${git_hash}_${timestamp}.json file. | 534 # swarming bot into a coverage_by_line_${git_hash}_${timestamp}.json file. |
| 533 src_lbl_file = results_dir.join('coverage_by_line_%s.json' % got_revision) | 535 src_lbl_file = results_dir.join('coverage_by_line_%s.json' % got_revision) |
| 534 dst_lbl_file_basename = 'coverage_by_line_%s_%s.json' % ( | 536 dst_lbl_file_basename = 'coverage_by_line_%s_%s.json' % ( |
| 535 got_revision, git_timestamp) | 537 got_revision, git_timestamp) |
| 536 dst_lbl_file = results_dir.join(dst_lbl_file_basename) | 538 dst_lbl_file = results_dir.join(dst_lbl_file_basename) |
| 537 api.file.copy('Line-by-line coverage JSON', src_lbl_file, dst_lbl_file, | 539 api.file.copy('Line-by-line coverage JSON', src_lbl_file, dst_lbl_file, |
| 538 infra_step=True) | 540 infra_step=True) |
| 539 api.file.remove('old line-by-line coverage JSON', src_lbl_file) | 541 api.file.remove('old line-by-line coverage JSON', src_lbl_file) |
| 540 | 542 |
| 541 # Upload line-by-line coverage data. | 543 # Upload line-by-line coverage data. |
| 542 api.gsutil.upload( | 544 api.gsutil.upload( |
| 543 name='upload line-by-line coverage data', | 545 name='upload line-by-line coverage data', |
| 544 source=dst_lbl_file, | 546 source=dst_lbl_file, |
| 545 bucket='skia-infra', | 547 bucket='skia-infra', |
| 546 dest='/'.join(('coverage-json-v1', gs_json_path, | 548 dest='/'.join(('coverage-json-v1', gs_json_path, |
| 547 dst_lbl_file_basename)), | 549 dst_lbl_file_basename)), |
| 548 env={'AWS_CREDENTIAL_FILE': None, 'BOTO_CONFIG': None}, | 550 env={'AWS_CREDENTIAL_FILE': None, 'BOTO_CONFIG': None}, |
| 549 ) | 551 ) |
| 550 | 552 |
| 551 | 553 |
| 552 def cipd_pkg(api, infrabots_dir, asset_name): | 554 def cipd_pkg(api, infrabots_dir, asset_name): |
| 553 """Find and return the CIPD package info for the given asset.""" | 555 """Find and return the CIPD package info for the given asset.""" |
| 554 version_file = infrabots_dir.join('assets', asset_name, 'VERSION') | 556 version_file = infrabots_dir.join('assets', asset_name, 'VERSION') |
| 555 version = api.skia._readfile(version_file, | 557 version = api.run.readfile(version_file, |
| 556 name='read %s VERSION' % asset_name, | 558 name='read %s VERSION' % asset_name, |
| 557 test_data='0').rstrip() | 559 test_data='0').rstrip() |
| 558 version = 'version:%s' % version | 560 version = 'version:%s' % version |
| 559 return (asset_name, 'skia/bots/%s' % asset_name, version) | 561 return (asset_name, 'skia/bots/%s' % asset_name, version) |
| 560 | 562 |
| 561 | 563 |
| 562 def print_properties(api): | 564 def print_properties(api): |
| 563 """Dump out all properties for debugging purposes.""" | 565 """Dump out all properties for debugging purposes.""" |
| 564 props = {} | 566 props = {} |
| 565 for k, v in api.properties.iteritems(): | 567 for k, v in api.properties.iteritems(): |
| (...skipping 11 matching lines...) |
| 577 ''', | 579 ''', |
| 578 args=[api.json.input(props)]) | 580 args=[api.json.input(props)]) |
| 579 | 581 |
| 580 | 582 |
| 581 def RunSteps(api): | 583 def RunSteps(api): |
| 582 # TODO(borenet): Remove this once SwarmBucket is working. | 584 # TODO(borenet): Remove this once SwarmBucket is working. |
| 583 print_properties(api) | 585 print_properties(api) |
| 584 | 586 |
| 585 got_revision = checkout_steps(api) | 587 got_revision = checkout_steps(api) |
| 586 infrabots_dir = api.path['checkout'].join('infra', 'bots') | 588 infrabots_dir = api.path['checkout'].join('infra', 'bots') |
| 587 api.skia_swarming.setup( | 589 api.swarming.setup( |
| 588 infrabots_dir.join('tools', 'luci-go'), | 590 infrabots_dir.join('tools', 'luci-go'), |
| 589 swarming_rev='') | 591 swarming_rev='') |
| 590 | 592 |
| 591 # Run gsutil.py to ensure that it's installed. | 593 # Run gsutil.py to ensure that it's installed. |
| 592 api.gsutil(['help']) | 594 api.gsutil(['help']) |
| 593 | 595 |
| 594 extra_hashes = [] | 596 extra_hashes = [] |
| 595 | 597 |
| 596 # Get ready to compile. | 598 # Get ready to compile. |
| 597 compile_cipd_deps = [] | 599 compile_cipd_deps = [] |
| 598 extra_compile_hashes = [] | 600 extra_compile_hashes = [] |
| 599 | 601 |
| 600 infrabots_dir = api.path['checkout'].join('infra', 'bots') | 602 infrabots_dir = api.path['checkout'].join('infra', 'bots') |
| 601 if 'Infra' in api.properties['buildername']: | 603 if 'Infra' in api.properties['buildername']: |
| 602 return infra_swarm(api, got_revision, infrabots_dir, extra_hashes) | 604 return infra_swarm(api, got_revision, infrabots_dir, extra_hashes) |
| 603 | 605 |
| 604 builder_spec = api.skia.get_builder_spec(api.path['checkout'], | 606 builder_spec = api.core.get_builder_spec(api.path['checkout'], |
| 605 api.properties['buildername']) | 607 api.properties['buildername']) |
| 606 builder_cfg = builder_spec['builder_cfg'] | 608 builder_cfg = builder_spec['builder_cfg'] |
| 607 | 609 |
| 608 if 'RecreateSKPs' in api.properties['buildername']: | 610 if 'RecreateSKPs' in api.properties['buildername']: |
| 609 recreate_skps_swarm(api, builder_spec, got_revision, infrabots_dir, | 611 recreate_skps_swarm(api, builder_spec, got_revision, infrabots_dir, |
| 610 extra_hashes) | 612 extra_hashes) |
| 611 return | 613 return |
| 612 | |
| 613 # Android bots require an SDK. | |
| 614 if 'Android' in api.properties['buildername']: | 614 if 'Android' in api.properties['buildername']: |
| 615 compile_cipd_deps.append(cipd_pkg(api, infrabots_dir, 'android_sdk')) | 615 compile_cipd_deps.append(cipd_pkg(api, infrabots_dir, 'android_sdk')) |
| 616 | 616 |
| 617 # Compile. | 617 # Compile. |
| 618 do_compile_steps = builder_spec.get('do_compile_steps', True) | 618 do_compile_steps = builder_spec.get('do_compile_steps', True) |
| 619 if do_compile_steps: | 619 if do_compile_steps: |
| 620 extra_hashes.append(compile_steps_swarm( | 620 extra_hashes.append(compile_steps_swarm( |
| 621 api, builder_spec, got_revision, infrabots_dir, extra_compile_hashes, | 621 api, builder_spec, got_revision, infrabots_dir, extra_compile_hashes, |
| 622 cipd_packages=compile_cipd_deps)) | 622 cipd_packages=compile_cipd_deps)) |
| 623 | 623 |
| (...skipping 83 matching lines...) |
| 707 test += api.path.exists(*paths) | 707 test += api.path.exists(*paths) |
| 708 | 708 |
| 709 return test | 709 return test |
| 710 | 710 |
| 711 | 711 |
| 712 def GenTests(api): | 712 def GenTests(api): |
| 713 for mastername, slaves in TEST_BUILDERS.iteritems(): | 713 for mastername, slaves in TEST_BUILDERS.iteritems(): |
| 714 for slavename, builders_by_slave in slaves.iteritems(): | 714 for slavename, builders_by_slave in slaves.iteritems(): |
| 715 for builder in builders_by_slave: | 715 for builder in builders_by_slave: |
| 716 yield test_for_bot(api, builder, mastername, slavename) | 716 yield test_for_bot(api, builder, mastername, slavename) |
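For quick reference while reviewing, the sketch below collects the module renames this CL applies, assembled from the hunks shown above. It is only a summary of the calls visible in this diff, not a complete inventory of the new `core`, `run`, `vars`, and `swarming` recipe modules' APIs.

```python
# Summary of the api.skia / api.skia_swarming split as seen in the hunks above.
# Illustrative only; taken from the changed call sites in this diff.
RENAMED_CALLS = {
    'api.skia.get_builder_spec': 'api.core.get_builder_spec',
    'api.skia.resource':         'api.core.resource',
    'api.skia._readfile':        'api.run.readfile',
    'api.skia._writefile':       'api.run.writefile',
    'api.skia.rmtree':           'api.run.rmtree',
    'api.skia.gsutil_env':       'api.vars.gsutil_env',
    # The skia_swarming module is renamed wholesale to swarming:
    'api.skia_swarming.setup': 'api.swarming.setup',
    'api.skia_swarming.isolate_and_trigger_task':
        'api.swarming.isolate_and_trigger_task',
    'api.skia_swarming.collect_swarming_task':
        'api.swarming.collect_swarming_task',
    'api.skia_swarming.collect_swarming_task_isolate_hash':
        'api.swarming.collect_swarming_task_isolate_hash',
}
```

Correspondingly, DEPS drops 'skia' and 'skia_swarming' and gains 'core', 'run', 'swarming', and 'vars'.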